Example #1
1
def test_union_all_and_compose_all():
    K3=nx.complete_graph(3)
    P3=nx.path_graph(3)

    G1=nx.DiGraph()
    G1.add_edge('A','B')
    G1.add_edge('A','C')
    G1.add_edge('A','D')
    G2=nx.DiGraph()
    G2.add_edge('1','2')
    G2.add_edge('1','3')
    G2.add_edge('1','4')

    G=nx.union_all([G1,G2])
    H=nx.compose_all([G1,G2])
    assert_edges_equal(G.edges(),H.edges())
    assert_false(G.has_edge('A','1'))
    assert_raises(nx.NetworkXError, nx.union, K3, P3)
    H1=nx.union_all([H,G1],rename=('H','G1'))
    assert_equal(sorted(H1.nodes()),
        ['G1A', 'G1B', 'G1C', 'G1D',
         'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD'])

    H2=nx.union_all([H,G2],rename=("H",""))
    assert_equal(sorted(H2.nodes()),
        ['1', '2', '3', '4',
         'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD'])

    assert_false(H1.has_edge('NB','NA'))

    G=nx.compose_all([G,G])
    assert_edges_equal(G.edges(),H.edges())

    G2=nx.union_all([G2,G2],rename=('','copy'))
    assert_equal(sorted(G2.nodes()),
        ['1', '2', '3', '4', 'copy1', 'copy2', 'copy3', 'copy4'])

    assert_equal(G2.neighbors('copy4'),[])
    assert_equal(sorted(G2.neighbors('copy1')),['copy2', 'copy3', 'copy4'])
    assert_equal(len(G),8)
    assert_equal(nx.number_of_edges(G),6)

    E=nx.disjoint_union_all([G,G])
    assert_equal(len(E),16)
    assert_equal(nx.number_of_edges(E),12)

    E=nx.disjoint_union_all([G1,G2])
    assert_equal(sorted(E.nodes()),[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])

    G1=nx.DiGraph()
    G1.add_edge('A','B')
    G2=nx.DiGraph()
    G2.add_edge(1,2)
    G3=nx.DiGraph()
    G3.add_edge(11,22)
    G4=nx.union_all([G1,G2,G3],rename=("G1","G2","G3"))
    assert_equal(sorted(G4.nodes()),
        ['G1A', 'G1B', 'G21', 'G22',
         'G311', 'G322'])
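The test above contrasts three combinators. As a minimal, illustrative sketch (not part of the test suite; assumes networkx 2.x): disjoint_union_all relabels nodes to consecutive integers, compose_all merges nodes with equal labels, and union_all keeps the original labels but requires them to be disjoint, optionally via rename prefixes.

import networkx as nx

G = nx.path_graph(3)                         # nodes 0, 1, 2
H = nx.path_graph(3)                         # same labels on purpose

D = nx.disjoint_union_all([G, H])            # relabelled to 0..5
assert sorted(D) == [0, 1, 2, 3, 4, 5]

C = nx.compose_all([G, H])                   # equal labels are merged
assert sorted(C) == [0, 1, 2]

U = nx.union_all([G, H], rename=("G", "H"))  # prefixes keep labels disjoint
assert sorted(U) == ['G0', 'G1', 'G2', 'H0', 'H1', 'H2']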
Example #2
0
def test_union_all_and_compose_all():
    K3 = nx.complete_graph(3)
    P3 = nx.path_graph(3)

    G1 = nx.DiGraph()
    G1.add_edge('A', 'B')
    G1.add_edge('A', 'C')
    G1.add_edge('A', 'D')
    G2 = nx.DiGraph()
    G2.add_edge('1', '2')
    G2.add_edge('1', '3')
    G2.add_edge('1', '4')

    G = nx.union_all([G1, G2])
    H = nx.compose_all([G1, G2])
    assert_edges_equal(G.edges(), H.edges())
    assert not G.has_edge('A', '1')
    pytest.raises(nx.NetworkXError, nx.union, K3, P3)
    H1 = nx.union_all([H, G1], rename=('H', 'G1'))
    assert (sorted(H1.nodes()) ==
            ['G1A', 'G1B', 'G1C', 'G1D',
             'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD'])

    H2 = nx.union_all([H, G2], rename=("H", ""))
    assert (sorted(H2.nodes()) ==
            ['1', '2', '3', '4',
             'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD'])

    assert not H1.has_edge('NB', 'NA')

    G = nx.compose_all([G, G])
    assert_edges_equal(G.edges(), H.edges())

    G2 = nx.union_all([G2, G2], rename=('', 'copy'))
    assert (sorted(G2.nodes()) ==
            ['1', '2', '3', '4', 'copy1', 'copy2', 'copy3', 'copy4'])

    assert sorted(G2.neighbors('copy4')) == []
    assert sorted(G2.neighbors('copy1')) == ['copy2', 'copy3', 'copy4']
    assert len(G) == 8
    assert nx.number_of_edges(G) == 6

    E = nx.disjoint_union_all([G, G])
    assert len(E) == 16
    assert nx.number_of_edges(E) == 12

    E = nx.disjoint_union_all([G1, G2])
    assert sorted(E.nodes()) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]

    G1 = nx.DiGraph()
    G1.add_edge('A', 'B')
    G2 = nx.DiGraph()
    G2.add_edge(1, 2)
    G3 = nx.DiGraph()
    G3.add_edge(11, 22)
    G4 = nx.union_all([G1, G2, G3], rename=("G1", "G2", "G3"))
    assert (sorted(G4.nodes()) ==
            ['G1A', 'G1B', 'G21', 'G22',
             'G311', 'G322'])
Example #3
0
def gen_graphs(sizes, p_community=0.5, p_intra=0.7, p_inter=0.01):
    """
    Generate community graphs.
    """
    A = []
    for V in tqdm(sizes):
        if np.random.rand() < p_community:
            comms = [
                nx.gnp_random_graph(V // 4, p_intra),
                nx.gnp_random_graph(V // 4, p_intra),
                nx.gnp_random_graph(V // 4, p_intra),
                nx.gnp_random_graph(V - 3 * (V // 4), p_intra)
            ]
            graph = nx.disjoint_union_all(comms)
            graph = nx.to_numpy_array(graph)
            block1 = np.arange(V // 4)
            block2 = np.arange(V // 4, 2 * (V // 4))
            block3 = np.arange(2 * (V // 4), 3 * (V // 4))
            block4 = np.arange(3 * (V // 4), V)
            edges = [
                list(itertools.product(block1, block2)),
                list(itertools.product(block1, block3)),
                list(itertools.product(block1, block4)),
                list(itertools.product(block2, block3)),
                list(itertools.product(block2, block4)),
                list(itertools.product(block3, block4)),
            ]
            for (i, j) in (edges[0][0], edges[1][1], edges[4][2]):
                graph[i, j], graph[j, i] = 1, 1
            remaining = list(itertools.chain.from_iterable(edges))  # flatten the cross-block pairs
            np.random.shuffle(remaining)
            for (i, j) in remaining[:int(p_inter * V)]:
                graph[i, j], graph[j, i] = 1, 1
        else:
            comms = [
                nx.gnp_random_graph(V // 2, p_intra),
                nx.gnp_random_graph((V + 1) // 2, p_intra)
            ]
            graph = nx.disjoint_union_all(comms)
            graph = nx.to_numpy_array(graph)
            block1 = np.arange(V // 2)
            block2 = np.arange(V // 2, V)
            remaining = list(itertools.product(block1, block2))
            np.random.shuffle(remaining)
            for (i, j) in remaining[:int(p_inter * V + 1)]:
                graph[i, j], graph[j, i] = 1, 1
        P = np.eye(V)
        np.random.shuffle(P)
        graph = P.T @ graph @ P
        if nx.number_connected_components(nx.from_numpy_array(graph)) > 1:
            sizes = sizes + [V]
            continue
        A.append(graph)
    return np.array(A)
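A minimal sketch (separate from the function above) of the final shuffle step in gen_graphs: conjugating the adjacency matrix by a shuffled identity matrix P only permutes the node labels, so the resulting graph is isomorphic to the original.

import numpy as np
import networkx as nx

A = nx.to_numpy_array(nx.path_graph(4))
P = np.eye(4)
np.random.shuffle(P)                 # random permutation matrix (shuffled rows)
B = P.T @ A @ P                      # relabelled adjacency matrix
assert nx.is_isomorphic(nx.from_numpy_array(A), nx.from_numpy_array(B))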
Example #4
0
def test_union_all_and_compose_all():
    K3 = nx.complete_graph(3)
    P3 = nx.path_graph(3)

    G1 = nx.DiGraph()
    G1.add_edge("A", "B")
    G1.add_edge("A", "C")
    G1.add_edge("A", "D")
    G2 = nx.DiGraph()
    G2.add_edge("1", "2")
    G2.add_edge("1", "3")
    G2.add_edge("1", "4")

    G = nx.union_all([G1, G2])
    H = nx.compose_all([G1, G2])
    assert_edges_equal(G.edges(), H.edges())
    assert_false(G.has_edge("A", "1"))
    assert_raises(nx.NetworkXError, nx.union, K3, P3)
    H1 = nx.union_all([H, G1], rename=("H", "G1"))
    assert_equal(sorted(H1.nodes()), ["G1A", "G1B", "G1C", "G1D", "H1", "H2", "H3", "H4", "HA", "HB", "HC", "HD"])

    H2 = nx.union_all([H, G2], rename=("H", ""))
    assert_equal(sorted(H2.nodes()), ["1", "2", "3", "4", "H1", "H2", "H3", "H4", "HA", "HB", "HC", "HD"])

    assert_false(H1.has_edge("NB", "NA"))

    G = nx.compose_all([G, G])
    assert_edges_equal(G.edges(), H.edges())

    G2 = nx.union_all([G2, G2], rename=("", "copy"))
    assert_equal(sorted(G2.nodes()), ["1", "2", "3", "4", "copy1", "copy2", "copy3", "copy4"])

    assert_equal(G2.neighbors("copy4"), [])
    assert_equal(sorted(G2.neighbors("copy1")), ["copy2", "copy3", "copy4"])
    assert_equal(len(G), 8)
    assert_equal(nx.number_of_edges(G), 6)

    E = nx.disjoint_union_all([G, G])
    assert_equal(len(E), 16)
    assert_equal(nx.number_of_edges(E), 12)

    E = nx.disjoint_union_all([G1, G2])
    assert_equal(sorted(E.nodes()), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])

    G1 = nx.DiGraph()
    G1.add_edge("A", "B")
    G2 = nx.DiGraph()
    G2.add_edge(1, 2)
    G3 = nx.DiGraph()
    G3.add_edge(11, 22)
    G4 = nx.union_all([G1, G2, G3], rename=("G1", "G2", "G3"))
    assert_equal(sorted(G4.nodes()), ["G1A", "G1B", "G21", "G22", "G311", "G322"])
Example #5
0
def test_is_berge(pool : Pool, alt=False):
    
    if alt:
        from alternate import is_berge_alt as is_berge
    else:
        from berge import is_berge

    # Note that graphs are perfect iff they are Berge

    for i in range(5):
        # Bipartite graphs are always Berge

        n1, n2 = random.randint(1, 12), random.randint(1, 12)
        
        graph = random_bipartite(n1=n1, n2=n2, p=.4)

        assert(is_berge(graph, pool=pool))

    for i in range(5):
        graph = nx.line_graph(random_bipartite(n1=10, n2=10, p=.15))

        # Line graphs of bipartite graphs are perfect by Konig's theorem

        assert(is_berge(graph, pool=pool))

    for i in range(10, 15):
        assert(is_berge(nx.complete_graph(i),  pool=pool))

    for i in range(5):

        # Make sure we work properly on disconnected graphs

        graph = nx.disjoint_union_all([
            random_bipartite(
                random.randint(1, 6), 
                random.randint(1, 6), .2) 
            for i in range(3)])

        assert(is_berge(graph,  pool=pool))

    for i in range(5):
        m = random.randint(2, 12)

        graph = nx.triangular_lattice_graph(m, 2)

        assert(is_berge(graph, pool=pool))

    for i in range(5):
        n = random.randint(4, 20)

        graph = random_chordal(n, .2)

        assert(is_berge(graph, pool=pool))

    for i in range(10):
        n = random.randint(4, 20)

        graph = nx.cycle_graph(n)

        assert(is_berge(graph, pool=pool) == (n % 2 == 0))
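Side note on the last loop (an illustration, not from the source): a graph is Berge when neither it nor its complement contains an induced odd cycle of length five or more, and the smallest violation is the 5-cycle, which is self-complementary.

import networkx as nx

C5 = nx.cycle_graph(5)
assert nx.is_isomorphic(C5, nx.complement(C5))   # C5 is its own complement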
Example #6
0
def n_community(c_sizes, p_inter=0.01):
    graphs = [
        nx.gnp_random_graph(c_sizes[i], 0.7, seed=i)
        for i in range(len(c_sizes))
    ]
    G = nx.disjoint_union_all(graphs)
    communities = list(nx.connected_component_subgraphs(G))
    #communities = list(G.subgraph(c) for c in nx.connected_components(G))[0]
    for i in range(len(communities)):
        subG1 = communities[i]
        nodes1 = list(subG1.nodes())
        for j in range(i + 1, len(communities)):
            subG2 = communities[j]
            nodes2 = list(subG2.nodes())
            has_inter_edge = False
            for n1 in nodes1:
                for n2 in nodes2:
                    if np.random.rand() < p_inter:
                        G.add_edge(n1, n2)
                        has_inter_edge = True
            if not has_inter_edge:
                G.add_edge(nodes1[0], nodes2[0])
    #print('connected comp: ', len(list(nx.connected_component_subgraphs(G))))
    G = G.to_directed()
    G = gl.networkx2graph(G)
    return G
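Note that nx.connected_component_subgraphs was removed in networkx 2.4; the commented-out line above hints at the replacement. A sketch of the equivalent pattern on current versions (illustrative, not part of the original snippet):

import networkx as nx

G = nx.disjoint_union_all([nx.complete_graph(3), nx.complete_graph(4)])
communities = [G.subgraph(c).copy() for c in nx.connected_components(G)]
assert [len(c) for c in communities] == [3, 4]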
Example #7
0
 def __init__(self,
              data: List[MolecularGraph2D],
              concentration: List[float] = None,
              graph_type: Literal['single_graph',
                                  'multi_graph'] = 'single_graph'):
     # read data point
     self.data = data
     # features_mol set None
     self.features_mol = None
     # set concentration
     if concentration is None:
         self.concentration = [1.0] * len(data)
     else:
         self.concentration = concentration
     graphs = [d.graph for d in self.data]
     # map() is lazy in Python 3, so apply the concentration update explicitly
     for g, c in zip(graphs, self.concentration):
         g.update_concentration(c)
     # set graph
     self.graph_type = graph_type
     if graph_type == 'single_graph':
         # combine several graphs into a disconnected graph
         self.graph = nx.disjoint_union_all(
             [g.to_networkx() for g in graphs])
         self.graph = _from_networkx(HashGraph, self.graph)
         self.graph.normalize_concentration()
     else:
         self.graph = [
             rv for r in zip(graphs, self.concentration) for rv in r
         ]
Example #8
0
    def add_layer(self, layer, **attr):
        if self.num_nodes_in_layers == 0:
            self.list_of_layers = [layer]
        else:
            self.list_of_layers.append(layer)

        self.num_layers = len(self.list_of_layers)
        self.num_nodes_in_layers = self.list_of_layers[0].number_of_nodes()

        for i, j in layer.edges():
            self.intra_layer_edges.append(
                (i + (len(self.list_of_layers) - 1) * layer.number_of_nodes(),
                 j + (len(self.list_of_layers) - 1) * layer.number_of_nodes()))

        try:
            Graph.__init__(
                self, Graph(disjoint_union_all(self.list_of_layers), **attr))
        except multinetxError:
            raise multinetxError("Multiplex cannot inherit Graph properly")

        ## Check if all graphs have the same number of nodes
        for lg in self.list_of_layers:
            try:
                assert (lg.number_of_nodes() == self.num_nodes_in_layers)
            except AssertionError:
                raise multinetxError(
                    "Graph at layer does not have the same number of nodes")
Example #9
0
def evaluate(submission):
    parts = (
        [nx.complete_graph(10) for _ in range(3)] +
        [nx.cycle_graph(10) for _ in range(3)]
    )

    graph = nx.disjoint_union_all(parts)

    N = len(graph.nodes())
    assert list(graph.nodes()) == list(range(N))  # check nodes are zero-based

    Q = 10
    D = [graph.degree(u) for u in graph.nodes()]
    adj = [list(graph.neighbors(u)) for u in graph.nodes()]

    with submission.run(global_variables=dict(N=N, Q=Q, D=D, adj=adj)) as p:

        memory_usage = p.sandbox.get_info().memory_usage
        print(f"Memory usage: {memory_usage} bytes")

        for _ in range(Q):
            u = v = None
            while u == v:
                u, v = random.randint(0, N - 1), random.randint(0, N - 1)

            connected = bool(p.call.is_there_a_path(u, v))
            if nx.has_path(graph, u, v) == connected:
                print("Correct")
            else:
                print("Wrong")
            print(f"Nodes {u} {v} -> {connected}")
Example #10
0
def ncommunity(c_sizes, graph_size, p_inter=0.1, p_intera=0.4):
    graphs = [
        nx.gnp_random_graph(c_sizes[i], p_intera, seed=i)
        for i in range(len(c_sizes))
    ]
    G = nx.disjoint_union_all(graphs)
    communities = list(nx.connected_components(G))
    for i in range(len(communities)):
        subG1 = communities[i]
        nodes1 = list(subG1)
        for j in range(i + 1, len(communities)):
            subG2 = communities[j]
            nodes2 = list(subG2)
            has_inter_edge = False
            for n1 in nodes1:
                for n2 in nodes2:
                    if np.random.rand() < p_inter:
                        G.add_edge(n1, n2)
                        has_inter_edge = True
            if not has_inter_edge:
                G.add_edge(nodes1[0], nodes2[0])

    x = list(range(graph_size))
    random.shuffle(x)

    if len(G) < graph_size:  # pad with isolated nodes up to graph_size
        G.add_nodes_from([i for i in range(len(G), graph_size)])
    mapping = {k: v for k, v in zip(list(range(graph_size)), x)}
    G = nx.relabel_nodes(G, mapping)
    return G
Example #11
0
def gen_graphs(sizes, p_intra=0.7, p_inter=0.01):
    """
    Generate community graphs.
    """
    A = []
    for V in tqdm(sizes):
        comms = [
            nx.gnp_random_graph(V // 2, p_intra),
            nx.gnp_random_graph((V + 1) // 2, p_intra)
        ]
        graph = nx.disjoint_union_all(comms)
        graph = nx.to_numpy_array(graph)
        block1 = np.arange(V // 2)
        block2 = np.arange(V // 2, V)
        remaining = list(itertools.product(block1, block2))
        np.random.shuffle(remaining)
        for (i, j) in remaining[:int(p_inter * V + 1)]:
            graph[i, j], graph[j, i] = 1, 1
        P = np.eye(V)
        np.random.shuffle(P)
        graph = P.T @ graph @ P
        if nx.number_connected_components(nx.from_numpy_array(graph)) > 1:
            sizes = sizes + [V]
            continue
        A.append(graph)
    return np.array(A)
Example #12
0
def build_test_graph(z_in, z_out, num_groups=4, group_size=32):
    """Return a test graph of random equal sized community groups

    Parameters:
    -----------
    z_in -- average number of edges from a node to nodes in the same community
    z_out -- average number of edges from a node to nodes in other communities
    """
    # Create groups and edges within groups
    groups = []
    p_in = z_in / (group_size - 1)
    for i in range(num_groups):
        group = nx.erdos_renyi_graph(group_size, p_in)
        nx.set_node_attributes(group,
                               dict([(node, i) for node in group.nodes()]),
                               'group_id')
        groups.append(group)

    G = nx.disjoint_union_all(groups)

    # Create edges between groups
    p_out = z_out / ((num_groups - 1) * group_size)
    edges = itertools.combinations(G.nodes(), 2)
    for i, j in edges:
        if (G.node[i]['group_id'] != G.node[j]['group_id']
                and random.random() < p_out):
            G.add_edge(i, j)

    return G
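A quick numeric check (with made-up values, not part of the function) of the probabilities used above: these choices of p_in and p_out give an expected within-group degree of z_in and an expected between-group degree of z_out.

z_in, z_out, num_groups, group_size = 6.0, 2.0, 4, 32   # hypothetical values
p_in = z_in / (group_size - 1)
p_out = z_out / ((num_groups - 1) * group_size)
assert abs(p_in * (group_size - 1) - z_in) < 1e-9                  # intra-degree
assert abs(p_out * (num_groups - 1) * group_size - z_out) < 1e-9   # inter-degree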
Example #13
0
    def add_layer(self, layer, **attr):
        if self.num_nodes_in_layers == 0:
            self.list_of_layers=[layer]
        else:
            self.list_of_layers.append(layer)
            
        self.num_layers = len(self.list_of_layers)
        self.num_nodes_in_layers = self.list_of_layers[0].number_of_nodes()
        
        for i, j in layer.edges():
            self.intra_layer_edges.append((
                i + (len(self.list_of_layers) - 1) * layer.number_of_nodes(),
                j + (len(self.list_of_layers) - 1) * layer.number_of_nodes()))

        try:
            Graph.__init__(self,
                        Graph(disjoint_union_all(self.list_of_layers),
                        **attr))
        except multinetxError:
            raise multinetxError("Multiplex cannot inherit Graph properly")

        ## Check if all graphs have the same number of nodes
        for lg in self.list_of_layers:
            try:
                assert(lg.number_of_nodes() == self.num_nodes_in_layers)
            except AssertionError:
                raise multinetxError("Graph at layer does not have the same number of nodes")  
Example #14
0
def random_sbm(n=1000, c=10, p_w=0.1, p_b=0.01, std=30, seed=None):
    '''
    Draws an SBM with given parameters

    n: Number of nodes(Approx.)
    c: Number of communities.
    std: standard deviation of normal distribution of community size.
    p_w: within community edge probability.
    p_b: between communities edge probability.
    '''
    rg = np.random.RandomState(seed)
    com_size = rg.normal(loc=(n / c), scale=std, size=c)
    #com_size = (com_size/np.sum(com_size))*n
    com_size = com_size.astype(int)

    com = [
        nx.gnp_random_graph(n=x, p=p_w, seed=rg.randint(1000))
        for x in com_size
    ]

    for i in range(c):
        nx.set_node_attributes(com[i], i, 'n')

    G = nx.disjoint_union_all(com)

    for u in G.nodes():
        for v in G.nodes():
            if G.nodes[u]['n'] != G.nodes[v]['n'] and rg.rand() < p_b:
                G.add_edge(u, v)

    return G
Example #15
    def __init__(self, query_data):
        self.relation = query_data.setdefault('relation', list())
        self.entity = query_data.setdefault('entity', list())
        self.intent = query_data.setdefault('intent', 'PERSON')

        self.relation_component_list = list()
        self.entity_component_list = list()
        # Get the subgraph components corresponding to the entities and relations
        self.init_relation_component()
        self.init_entity_component()
        # Collect the subgraph components into one graph (a disconnected union)
        self.disconnected_graph = nx.disjoint_union_all(
            self.relation_component_list + self.entity_component_list)
        self.query_graph = copy.deepcopy(self.disconnected_graph)
        self.old_query_graph = copy.deepcopy(self.disconnected_graph)

        self.node_type_dict = dict()
        self.node_type_statistic()
        self.component_assemble()

        while len(self.query_graph.nodes) != len(self.old_query_graph.nodes) \
                and not nx.algorithms.is_weakly_connected(self.query_graph):
            # An unchanged node count means the previous round merged nothing
            # No further merging is needed once the graph is connected
            self.old_query_graph = copy.deepcopy(self.query_graph)
            self.node_type_dict = dict()
            self.node_type_statistic()
            self.component_assemble()
        self.add_intention()
Example #16
0
def randomGraph(n):
	graphs = []
	# TODO: Allow flexibility in inputs
	if n == 50:
		subNodes = [15, 15, 10, 5, 5]
		edgeProbs = [0.35, 0.30, 0.40, 0.35, 0.45]
		numSubGraphs = len(subNodes)
	elif n == 500:
		subNodes = [75, 75, 75, 50, 50, 50, 50, 25, 25, 25]
		edgeProbs = [0.35, 0.30, 0.40, 0.35, 0.45, 0.3, 0.2, 0.35, 0.3, 0.4]
		numSubGraphs = len(subNodes)
	elif n == 1000:
		subNodes = [100, 100]
		subNodes.extend([75 for i in range(4)])
		subNodes.extend([25 for i in range(5)])
		subNodes.extend([15 for i in range(15)])
		subNodes.extend([10 for i in range(15)])
		numSubGraphs = len(subNodes)
		edgeProbs = [random.uniform(0.2, 0.45) for i in range(numSubGraphs)]
	for i in range(numSubGraphs):
		A = nx.gnp_random_graph(subNodes[i], edgeProbs[i])
		graphs.append(A)
	G = nx.disjoint_union_all(graphs)
	G = nx.convert_node_labels_to_integers(G, first_label=1)
	G = nx.relabel_nodes(G, lambda x: str(x))
	return G
Example #17
0
def test_input_output():
    l = [nx.Graph([(1, 2)]), nx.Graph([(3, 4)])]
    U = nx.disjoint_union_all(l)
    assert len(l) == 2
    C = nx.compose_all(l)
    assert len(l) == 2
    l = [nx.Graph([(1, 2)]), nx.Graph([(1, 2)])]
    R = nx.intersection_all(l)
    assert len(l) == 2
Example #18
0
def test_input_output():
    l = [nx.Graph([(1,2)]),nx.Graph([(3,4)])]
    U = nx.disjoint_union_all(l)
    assert_equal(len(l),2)
    C = nx.compose_all(l)
    assert_equal(len(l),2)
    l = [nx.Graph([(1,2)]),nx.Graph([(1,2)])]
    R = nx.intersection_all(l)
    assert_equal(len(l),2)
Example #19
def plot_small_p3_editing_example():
    np.random.seed(1)

    G_edited = nx.disjoint_union_all([nx.complete_graph(n) for n in [3, 4]])
    G_edited = nx.relabel_nodes(G_edited,
                                dict(zip(G_edited, string.ascii_lowercase)))
    edits = [("b", "g"), ("d", "e")]

    deletions = set(edits) - set(G_edited.edges())
    inserts = set(edits) - deletions

    G = G_edited.copy()
    G.add_edges_from(deletions)
    G.remove_edges_from(inserts)

    graphs = [
        ("P3-editing-example-G", G, set(G.edges()) - set(edits), inserts,
         deletions, [("b", "e")], [("b", "e"), ("b", "g"), ("e", "g")
                                   ]),  # ("d", "e"), ("e", "f"), ("d", "f")
        ("P3-editing-example-G-edited", G_edited, G_edited.edges(), (), (), (),
         ())
    ]

    for name, G, unedited, inserted, deleted, non_edges, fat in graphs:
        fig, ax = plt.subplots(figsize=(4, 4))
        ax.margins(0.15)
        ax.set_aspect("equal")
        ax.axis(False)

        def width(edges):
            return [
                3 if e in fat else 2 if e in inserted else 1 for e in edges
            ]

        pos = graphviz_layout(G, prog="neato")
        nx.draw_networkx_nodes(G,
                               pos,
                               node_color="lightgrey",
                               node_size=400,
                               ax=ax)
        nx.draw_networkx_labels(G, pos, labels={v: f"${v}$" for v in G}, ax=ax)

        edge_lists = [(unedited, "black", "solid"),
                      (inserted, "limegreen", "dashed"),
                      (deleted, "red", "solid"), (non_edges, "grey", "dashed")]

        for edgelist, edge_color, style in edge_lists:
            nx.draw_networkx_edges(G,
                                   pos,
                                   edgelist=edgelist,
                                   edge_color=edge_color,
                                   style=style,
                                   ax=ax,
                                   width=width(edgelist))

        fig.tight_layout()
        plt.savefig(f"{name}.pdf", bbox_inches="tight", pad_inches=0)
Example #20
0
def barthelemy_benchmark(n_er, prob, n_cliques, k_cliques):
    graph = nx.connected_component_subgraphs(nx.erdos_renyi_graph(n_er,
                                                                  prob))[0]
    graph_k = nx.disjoint_union_all(
        [nx.complete_graph(k_cliques) for i in range(0, n_cliques)])
    graph = nx.disjoint_union(graph, graph_k).copy()
    for i in range(0, n_cliques):
        graph.add_edge(np.random.randint(n_er), n_er + i * n_cliques)
    return graph
Example #21
0
def gen_2hier(num_graphs, num_clusters, n, m_range, inter_prob1, inter_prob2,
              feat_gen):
    ''' Each community is a BA graph.
    Args:
        inter_prob1: probability of one node connecting to any node in the other community within
            the large cluster.
        inter_prob2: probability of one node connecting to any node in the other community between
            the large cluster.
    '''
    graphs = []

    for i in range(num_graphs):
        clusters2 = []
        for j in range(len(num_clusters)):
            clusters = gen_er(range(n, n + 1), 0.5, num_clusters[j],
                              feat_gen[0])
            G = nx.disjoint_union_all(clusters)
            for u1 in range(G.number_of_nodes()):
                if np.random.rand() < inter_prob1:
                    target = np.random.choice(G.number_of_nodes() - n)
                    # move one cluster after to make sure it's not an intra-cluster edge
                    if target // n >= u1 // n:
                        target += n
                    G.add_edge(u1, target)
            clusters2.append(G)
        G = nx.disjoint_union_all(clusters2)
        cluster_sizes_cum = np.cumsum(
            [cluster2.number_of_nodes() for cluster2 in clusters2])
        curr_cluster = 0
        for u1 in range(G.number_of_nodes()):
            if u1 >= cluster_sizes_cum[curr_cluster]:
                curr_cluster += 1
            if np.random.rand() < inter_prob2:
                target = np.random.choice(
                    G.number_of_nodes() -
                    clusters2[curr_cluster].number_of_nodes())
                # move one cluster after to make sure it's not an intra-cluster edge
                if curr_cluster == 0 or target >= cluster_sizes_cum[
                        curr_cluster - 1]:
                    target += cluster_sizes_cum[curr_cluster]
                G.add_edge(u1, target)
        graphs.append(G)

    return graphs
Example #22
def ingest_consensus(sample: str, consensus_dir: Path, transform_file: Path,
                     url: str):
    """
    Storing data in MongoDB using psuedo world coords (1,.3,.3) microns rather than the
    slightly off floats found in the transform.txt file.
    """

    mongo_graph_provider = daisy.persistence.MongoDbGraphProvider(
        f"mouselight-{sample}-consensus", url, directed=True, mode="w")
    graph = mongo_graph_provider.get_graph(
        daisy.Roi(daisy.Coordinate([None, None, None]),
                  daisy.Coordinate([None, None, None])))
    consensus_graphs = []
    for consensus_neuron in tqdm(consensus_dir.iterdir(),
                                 "Consensus neurons: "):
        if (not consensus_neuron.is_dir()
                or not (consensus_neuron / "consensus.swc").exists()):
            continue
        consensus_graph = parse_consensus(
            consensus_neuron / "consensus.swc",
            consensus_neuron / "dendrite.swc",
            transform,
            offset=np.array([0, 0, 0]),
            resolution=np.array([300, 300, 1000]),
            transpose=[2, 1, 0],
        )
        for node in consensus_graph.nodes:
            consensus_graph.nodes[node]["position"] = consensus_graph.nodes[
                node]["location"].tolist()
            del consensus_graph.nodes[node]["location"]
        consensus_graphs.append(consensus_graph)
    logger.info("Consolidating consensus graphs!")
    consensus_graph = nx.disjoint_union_all(consensus_graphs)

    data = {}
    for node_id, attrs in consensus_graph.nodes.items():
        node_id = int(np.int64(node_id))
        node_ids = data.setdefault("id", [])
        node_ids.append(node_id)
        for key, value in attrs.items():
            dlist = data.setdefault(key, [])
            dlist.append(value)

    logger.info(
        f"Writing {len(consensus_graph.nodes)} nodes and {len(consensus_graph.edges)} edges!"
    )

    bulk_write_nodes(url, f"mouselight-{sample}-consensus", "nodes", data)
    bulk_write_edges(
        url,
        f"mouselight-{sample}-consensus",
        "edges",
        ("u", "v"),
        list(consensus_graph.edges),
        True,
    )
Example #23
0
def join_subtrees_to_root(g, children_sub_trees):
    """join_subtrees_to_root."""
    sizes = [len(c) for c in children_sub_trees]
    gg = nx.disjoint_union_all([g] + children_sub_trees)
    curr_size = 1
    gg.add_edge(0, curr_size)
    for size in sizes[:-1]:
        curr_size += size
        gg.add_edge(0, curr_size)
    gg.graph = dict()
    return gg
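The helper above relies on disjoint_union_all relabelling each input graph to a consecutive block of integers, so each child starts at an offset given by the sizes of the graphs before it. A tiny illustration (independent of the helper, which appears to assume g is a single root node):

import networkx as nx

g = nx.Graph()
g.add_node(0)                                   # single-node root graph
children = [nx.path_graph(2), nx.path_graph(3)]
gg = nx.disjoint_union_all([g] + children)
# node 0 is g, nodes 1-2 are the first child, nodes 3-5 are the second child
assert sorted(gg) == [0, 1, 2, 3, 4, 5]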
Example #24
0
def ring_cliques_benchmark(n, r):
    """
    Returns a networkx graph of ring of cliques with r cliques of n nodes
    n number of nodes in every clique
    r number of connected cliques
    """
    graph = nx.disjoint_union_all([nx.complete_graph(n) for i in range(0, r)])
    graph.add_edges_from([(u, u + n + 1) for u in range(0, r * (n), n)])
    graph.remove_node(n * r + 1)
    graph.add_edge(n - 1, n * r - 1)
    return graph
Example #25
def n_community(num_communities, max_nodes, p_inter=0.05):
    assert num_communities > 1

    one_community_size = max_nodes // num_communities
    c_sizes = [one_community_size] * num_communities
    total_nodes = one_community_size * num_communities
    """ 
    here we calculate `p_make_a_bridge` so that `p_inter = \mathbb{E}(Number_of_bridge_edges) / Total_number_of_nodes `
    
    To make it more clear: 
    let `M = num_communities` and `N = one_community_size`, then
    
    ```
    p_inter
    = \mathbb{E}(Number_of_bridge_edges) / Total_number_of_nodes
    = (p_make_a_bridge * C_M^2 * N^2) / (MN)  # see the code below for this derivation
    = p_make_a_bridge * (M-1) * N / 2
    ```
    
    so we have:
    """
    p_make_a_bridge = p_inter * 2 / (
        (num_communities - 1) * one_community_size)

    print(num_communities, total_nodes, end=' ')
    graphs = [
        nx.gnp_random_graph(c_sizes[i], 0.7, seed=i)
        for i in range(len(c_sizes))
    ]

    G = nx.disjoint_union_all(graphs)
    communities = list(nx.connected_component_subgraphs(G))
    add_edge = 0
    for i in range(len(communities)):
        subG1 = communities[i]
        nodes1 = list(subG1.nodes())
        for j in range(i + 1, len(communities)):  # loop for C_M^2 times
            subG2 = communities[j]
            nodes2 = list(subG2.nodes())
            has_inter_edge = False
            for n1 in nodes1:  # loop for N times
                for n2 in nodes2:  # loop for N times
                    if np.random.rand() < p_make_a_bridge:
                        G.add_edge(n1, n2)
                        has_inter_edge = True
                        add_edge += 1
            if not has_inter_edge:
                G.add_edge(nodes1[0], nodes2[0])
                add_edge += 1
    print('connected comp: ', len(list(nx.connected_component_subgraphs(G))),
          'add edges: ', add_edge)
    print(G.number_of_edges())
    return G
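A small numeric check (illustration only, with made-up sizes) of the derivation embedded above: the expected number of bridge edges divided by the total number of nodes indeed comes out to p_inter.

M, N, p_inter = 4, 25, 0.05                      # hypothetical M, N
p_make_a_bridge = p_inter * 2 / ((M - 1) * N)
expected_bridges = p_make_a_bridge * (M * (M - 1) / 2) * N ** 2
assert abs(expected_bridges / (M * N) - p_inter) < 1e-12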
Example #26
0
def plot_graphs(flname, graphs):
    plt.clf()
    G = nx.disjoint_union_all(graphs)
    c = [random.random() for i in range(nx.number_of_nodes(G))]
    nx.draw(G,
            pos=nx.nx_pydot.pydot_layout(G, prog="neato"),
            node_size=50,
            node_color=c,
            vmin=0.0,
            vmax=1.0,
            cmap=plt.get_cmap("Vega20c"))
    plt.savefig(flname, dpi=200)
Example #27
0
    def __init__(self, query_data, dependency=None):
        logger.info('Query Parsing...')
        self.relation = query_data.setdefault('relation', list())
        self.entity = query_data.setdefault('entity', list())
        self.intent = query_data['intent']
        self.dependency = dependency
        self.relation_component_list = list()
        self.entity_component_list = list()
        # Get the subgraph components corresponding to the entities and relations
        self.init_relation_component()
        self.init_entity_component()

        # If a dependency parse is available, build the component graph from it
        if self.dependency and len(self.dependency) > 0:
            logger.info('dependency exist.')
            print('dependency exist.')
            dm = DepMap(query_data['dependency'], self.relation_component_list,
                        self.entity_component_list)
            if dm.check_dep():
                # Use the dependency parse to obtain self.component_graph
                if nx.algorithms.is_weakly_connected(dm.dep_graph):
                    self.query_graph = dm.dep_graph
                    self.determine_intention()
                    return
                else:
                    logger.info('dependency wrong!')
        # Collect the subgraph components into one graph (a disconnected union)
        self.component_graph = nx.disjoint_union_all(
            self.relation_component_list + self.entity_component_list)
        self.query_graph = copy.deepcopy(self.component_graph)
        self.query_graph = Graph(self.query_graph)
        self.old_query_graph = copy.deepcopy(self.component_graph)

        self.node_type_dict = self.query_graph.node_type_statistic()
        self.component_assemble()

        while len(self.query_graph.nodes) != len(self.old_query_graph.nodes) \
                and not nx.algorithms.is_weakly_connected(self.query_graph):
            # An unchanged node count means the previous round merged nothing
            # No further merging is needed once the graph is connected
            self.old_query_graph = copy.deepcopy(self.query_graph)
            self.node_type_dict = self.query_graph.node_type_statistic()
            self.component_assemble()
        while not nx.algorithms.is_weakly_connected(self.query_graph):
            # If the graph is not connected, add default edges between its components
            flag = self.add_default_edge()
            if not flag:
                logger.info('default edge missing!')
                # If nothing was added, a default edge definition is missing
                break
        # After the two loops above the graph is connected; now determine the intent
        self.determine_intention()
Example #28
0
def test_overspecified_sources():
    """
    When sources are directly specified, we won't be able to determine when we
    are in the last component, so there will always be a trailing, leftmost
    pipe.
    """
    graph = nx.disjoint_union_all([
        nx.balanced_tree(r=2, h=1, create_using=nx.DiGraph),
        nx.balanced_tree(r=1, h=2, create_using=nx.DiGraph),
        nx.balanced_tree(r=2, h=1, create_using=nx.DiGraph),
    ])

    # defined starting point
    target1 = dedent("""
        ╟── 0
        ╎   ├─╼ 1
        ╎   └─╼ 2
        ╟── 3
        ╎   └─╼ 4
        ╎       └─╼ 5
        ╟── 6
        ╎   ├─╼ 7
        ╎   └─╼ 8
        """).strip()

    target2 = dedent("""
        ╟── 0
        ╎   ├─╼ 1
        ╎   └─╼ 2
        ╟── 3
        ╎   └─╼ 4
        ╎       └─╼ 5
        ╙── 6
            ├─╼ 7
            └─╼ 8
        """).strip()

    lines = []
    nx.forest_str(graph, write=lines.append, sources=graph.nodes)
    got1 = chr(10).join(lines)
    print("got1: ")
    print(got1)

    lines = []
    nx.forest_str(graph, write=lines.append)
    got2 = chr(10).join(lines)
    print("got2: ")
    print(got2)

    assert got1 == target1
    assert got2 == target2
Example #29
def reveal(args, idx=None, writer=None):
    labels_dict = {
        "None": 5,
        "Employee": 0,
        "Vice President": 1,
        "Manager": 2,
        "Trader": 3,
        "CEO+Managing Director+Director+President": 4,
    }
    max_enron_id = 183
    if idx is None:
        G_list = []
        labels_list = []
        for i in range(10):
            net = pickle.load(
                open("data/gnn-explainer-enron/enron_slice_{}.pkl".format(i),
                     "rb"))
            # net.add_nodes_from(range(max_enron_id))
            # labels=[n[1].get('role', 'None') for n in net.nodes(data=True)]
            # labels_num = [labels_dict[l] for l in labels]
            featgen_const = featgen.ConstFeatureGen(
                np.ones(args.input_dim, dtype=float))
            featgen_const.gen_node_features(net)
            G_list.append(net)
            print(net.number_of_nodes())
            # labels_list.append(labels_num)

        G = nx.disjoint_union_all(G_list)
        model = models.GcnEncoderNode(
            args.input_dim,
            args.hidden_dim,
            args.output_dim,
            len(labels_dict),
            args.num_gc_layers,
            bn=args.bn,
            args=args,
        )
        labels = [n[1].get("role", "None") for n in G.nodes(data=True)]
        labels_num = [labels_dict[l] for l in labels]
        for i in range(5):
            print("Label ", i, ": ", labels_num.count(i))

        print("Total num nodes: ", len(labels_num))
        print(labels_num)

        if args.gpu:
            model = model.cuda()
        train_node_classifier(G, labels_num, model, args, writer=writer)
    else:
        print("Running Enron full task")
Example #30
0
def windmill_graph(n, k):
    """Generate a windmill graph.
    A windmill graph is a graph of `n` cliques each of size `k` that are all
    joined at one node.
    It can be thought of as taking a disjoint union of `n` cliques of size `k`,
    selecting one point from each, and contracting all of the selected points.
    Alternatively, one could generate `n` cliques of size `k-1` and one node
    that is connected to all other nodes in the graph.

    Parameters
    ----------
    n : int
        Number of cliques
    k : int
        Size of cliques

    Returns
    -------
    G : NetworkX Graph
        windmill graph with n cliques of size k

    Raises
    ------
    NetworkXError
        If the number of cliques is less than two
        If the size of the cliques are less than two

    Examples
    --------
    >>> G = nx.windmill_graph(4, 5)

    Notes
    -----
    The node labeled `0` will be the node connected to all other nodes.
    Note that windmill graphs are usually denoted `Wd(k,n)`, so the parameters
    are in the opposite order as the parameters of this method.
    """
    if n < 2:
        msg = "A windmill graph must have at least two cliques"
        raise nx.NetworkXError(msg)
    if k < 2:
        raise nx.NetworkXError("The cliques must have at least two nodes")

    G = nx.disjoint_union_all(
        itertools.chain(
            [nx.complete_graph(k)], (nx.complete_graph(k - 1) for _ in range(n - 1))
        )
    )
    G.add_edges_from((0, i) for i in range(k, G.number_of_nodes()))
    return G
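A minimal sanity check of the construction described in the docstring (using only nx.windmill_graph, which the docstring's own example calls): n cliques of size k sharing a single hub node.

import networkx as nx

n, k = 4, 5
G = nx.windmill_graph(n, k)
assert G.number_of_nodes() == n * (k - 1) + 1        # one shared hub node
assert G.number_of_edges() == n * k * (k - 1) // 2   # cliques share no edges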
Example #31
0
def annotated_forest_generator(part, dag):
    tgs = [
        annotated_tree_generator(block, Counter(dag[block])) for block in part
    ]
    fg = product(*tgs)
    for f in fg:
        forest = nx.disjoint_union_all(f)
        Hlabels = nx.get_node_attributes(forest, 'H')
        hybrids = {v: k for k, v in Hlabels.items()}
        Tlabels = nx.get_node_attributes(forest, 'T')
        for u, Tlab in Tlabels.items():
            forest.add_edge(u, hybrids[Tlab])
        root = [u for u in forest.nodes() if forest.in_degree(u) == 0][0]
        forest.remove_node(root)
        yield forest
Example #32
0
def windmill_graph(n, k):
    """Generate a windmill graph.
    A windmill graph is a graph of `n` cliques each of size `k` that are all
    joined at one node.
    It can be thought of as taking a disjoint union of `n` cliques of size `k`,
    selecting one point from each, and contracting all of the selected points.
    Alternatively, one could generate `n` cliques of size `k-1` and one node
    that is connected to all other nodes in the graph.

    Parameters
    ----------
    n : int
        Number of cliques
    k : int
        Size of cliques

    Returns
    -------
    G : NetworkX Graph
        windmill graph with n cliques of size k

    Raises
    ------
    NetworkXError
        If the number of cliques is less than two
        If the size of the cliques are less than two

    Examples
    --------
    >>> G = nx.windmill_graph(4, 5)

    Notes
    -----
    The node labeled `0` will be the node connected to all other nodes.
    Note that windmill graphs are usually denoted `Wd(k,n)`, so the parameters
    are in the opposite order as the parameters of this method.
    """
    if n < 2:
        msg = 'A windmill graph must have at least two cliques'
        raise nx.NetworkXError(msg)
    if k < 2:
        raise nx.NetworkXError('The cliques must have at least two nodes')

    G = nx.disjoint_union_all(itertools.chain([nx.complete_graph(k)],
                                              (nx.complete_graph(k - 1)
                                               for _ in range(n - 1))))
    G.add_edges_from((0, i) for i in range(k, G.number_of_nodes()))
    return G
Example #33
0
 def init_dep_graph(self):
     for item in self.dependency:
         f = item['from']
         t = item['to']
         if f['type'] == 'entity' and t['type'] == 'relation':
             temp_graph = self.from_ent_to_rel(f['value'], t['value'])
             self.dep_graph_list.append(temp_graph)
         elif f['type'] == 'relation' and t['type'] == 'entity':
             temp_graph = self.from_rel_to_ent(f['value'], t['value'])
             self.dep_graph_list.append(temp_graph)
     self.dep_graph = nx.disjoint_union_all(self.dep_graph_list)
     mapping = dict()
     for i, n in enumerate(self.dep_graph.nodes):
         mapping[n] = i
     nx.relabel_nodes(self.dep_graph, mapping, copy=False)
     self.dep_graph = Graph(self.dep_graph)
Example #34
0
def get_all_users_interactions(postings=None,
                               votes=None,
                               multi_di_graph=False,
                               with_timestamp=False,
                               salvage_original_node_ids=False):
    """
	:param postings: Custom postings dataframe, if None (default) use all postings.
	:param votes: Custom votes dataframe, if None (default) use all votes.
	:param multi_di_graph: If True, return nx.MultiDiGraph, if False (default) nx.DiGraph.
	:param with_timestamp: If True, edges contain an attribute "created_at".
	:return: A directed graph (or multigraph) with users as nodes and arc from user1 to user2, if user1 interacted with user2,
		i.e. voted positively, voted negatively, or commented.
	"""
    if postings is None or votes is None:
        postings = utils.read_all_postings()
        votes = utils.read_all_votes()

    joined = postings.merge(votes, on="ID_Posting", suffixes=("_p", "_v"))
    positives = get_users_voted_other_users(joined,
                                            positive_vote=True,
                                            multi_di_graph=multi_di_graph,
                                            with_timestamp=with_timestamp)
    negatives = get_users_voted_other_users(joined,
                                            positive_vote=False,
                                            multi_di_graph=multi_di_graph,
                                            with_timestamp=with_timestamp)
    comments = get_users_commented_other_users(postings,
                                               multi_di_graph=multi_di_graph,
                                               with_timestamp=with_timestamp)

    if salvage_original_node_ids:
        a = nx.to_pandas_edgelist(positives)
        b = nx.to_pandas_edgelist(negatives)
        c = nx.to_pandas_edgelist(comments)
        if with_timestamp:
            return nx.from_pandas_edgelist(pd.concat([a, b, c]),
                                           source="source",
                                           target="target",
                                           edge_attr="created_at",
                                           create_using=nx.MultiDiGraph)
        else:
            return nx.from_pandas_edgelist(pd.concat([a, b, c]),
                                           source="source",
                                           target="target",
                                           create_using=nx.MultiDiGraph)
    else:
        return nx.disjoint_union_all([positives, negatives, comments])
Example #35
def sbm(cmtysize, pin, pout):
    graphs = []
    for i in range(0, len(cmtysize)):
        graphs.append(nx.gnp_random_graph(cmtysize[i], pin))
    G=nx.disjoint_union_all(graphs)

    s = [0]
    for i in range(0, len(cmtysize)):
        # cumulative block offsets: s[i] is the running total, not s[i-1]
        s.append(s[i] + cmtysize[i])

    for i in range(0, len(cmtysize)):
        for n in range(s[i], s[i+1]):
            for m in range(s[i+1], G.number_of_nodes()):
                if rand()<pout:
                        G.add_edge(n, m)
    return G
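For reference, the offset list s built above is just the cumulative community sizes; an equivalent one-liner (illustrative, with made-up sizes):

import numpy as np

cmtysize = [3, 4, 5]                     # hypothetical community sizes
s = [0] + np.cumsum(cmtysize).tolist()
assert s == [0, 3, 7, 12]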
Example #36
0
    def add_layer(self, layer, **attr):
        if self.num_nodes == 0:
            self.list_of_layers = [layer]
        else:
            self.list_of_layers.append(layer)
            self.num_nodes_in_layers.append(layer.number_of_nodes())

        self.num_layers = len(self.list_of_layers)

        for i, j in layer.edges():
            self.intra_layer_edges.append(
                (i + (len(self.list_of_layers) - 1) * layer.number_of_nodes(),
                 j + (len(self.list_of_layers) - 1) * layer.number_of_nodes()))
            try:
                Graph.__init__(
                    self, Graph(disjoint_union_all(self.list_of_layers),
                                **attr))
            except multinetxError:
                raise multinetxError("Multiplex cannot inherit Graph properly")
Example #37
0
def n_community(c_sizes, p_inter=0.01):
    graphs = [nx.gnp_random_graph(c_sizes[i], 0.7, seed=i) for i in range(len(c_sizes))]
    G = nx.disjoint_union_all(graphs)
    communities = list(nx.connected_component_subgraphs(G))
    for i in range(len(communities)):
        subG1 = communities[i]
        nodes1 = list(subG1.nodes())
        for j in range(i+1, len(communities)):
            subG2 = communities[j]
            nodes2 = list(subG2.nodes())
            has_inter_edge = False
            for n1 in nodes1:
                for n2 in nodes2:
                    if np.random.rand() < p_inter:
                        G.add_edge(n1, n2)
                        has_inter_edge = True
            if not has_inter_edge:
                G.add_edge(nodes1[0], nodes2[0])
    #print('connected comp: ', len(list(nx.connected_component_subgraphs(G))))
    return G
Example #38
0
def disjoint_cliques_test_graph(num_cliques, clique_size):
    G = nx.disjoint_union_all([nx.complete_graph(clique_size) for _ in range(num_cliques)])
    return nx.to_numpy_matrix(G)
Example #39
# the following is the bisimulation example:
S = nx.DiGraph()
S.add_edge(0,1,action=1)
S.add_edge(1,2,action=2)
S.add_edge(0,2,action=1)
S.add_edge(2,2,action=2)
actions = [1,2]

T = nx.DiGraph()
T.add_edge(0,1,action=1)
T.add_edge(1,1,action=2)

k = PaigeAndTarjan([1,2])
print(k.isBisimilar(S,T))

Q = nx.disjoint_union_all([S,T])
k.getCoarsestPartition(Q)

# Basic tests:
# The following test is example 1, shown in class
Q = nx.DiGraph()
Q.add_edge(1,2,action=1)
Q.add_edge(1,3,action=2)
Q.add_edge(1,4,action=3)
Q.add_edge(3,4,action=1)
Q.add_edge(3,5,action=2)
actions = [1, 2, 3]

k = PaigeAndTarjan([1,2,3])
k.getCoarsestPartition(Q)
Example #40
0
def test_mixed_type_disjoint_union():
    G = nx.Graph()
    H = nx.MultiGraph()
    I = nx.Graph()
    U = nx.disjoint_union_all([G,H,I])
Example #41
0
def sen(n1, p1, n2, p2, n3, p3, pa, pb):
    """Create a SEN model with an actor network of n1 elements (neighbourhood connectivity p1), a user network of n2 elements (neighbourhood connectivity p2), and an ecological network of n3 elements (neighbourhood connectivity p3). The probabilities of connections between the actor and user networks and between the user and ecological networks are passed through pa and pb, respectively."""

    # Actor network
    A = nx.newman_watts_strogatz_graph(n1, p1, 1)
    A.graph["Network"] = "Actors"
    # Adding id
    for i in range(len(A)):
        A.node[i]["num"] = i + 1
        # Subnetwork
    for i in range(len(A)):
        A.node[i]["subnetwork"] = 1
        # Adding random class (office dwellers/field people)
    p_class_bureau = 0.75
    for i in range(len(A)):
        if random.random() <= p_class_bureau:
            A.node[i]["group"] = 0
        else:
            A.node[i]["group"] = 0
            # Adding random weight
    # 	for n,nbrs in A.adjacency_iter():
    # 		for nbr,eattr in nbrs.items():
    # 			A[n][nbr]['weight'] = int(random.random()*8)

    # User network
    U = nx.newman_watts_strogatz_graph(n2, p2, 1)
    U.graph["Network"] = "Actors"
    # Adding id
    for i in range(len(U)):
        U.node[i]["num"] = i + 1001
        # Subnetwork
    for i in range(len(U)):
        U.node[i]["subnetwork"] = 2
        # Adding random class (office dwellers/field people)
    for i in range(len(U)):
        rnd = random.random()
        if rnd <= 0.2:
            U.node[i]["group"] = 1
        if rnd > 0.2 and rnd <= 0.4:
            U.node[i]["group"] = 1
        if rnd > 0.4 and rnd <= 0.6:
            U.node[i]["group"] = 1
        if rnd > 0.6 and rnd <= 0.8:
            U.node[i]["group"] = 1
        if rnd > 0.8:
            U.node[i]["group"] = 1
            # Adding random weight
    # 	for n,nbrs in U.adjacency_iter():
    # 		for nbr,eattr in nbrs.items():
    # 			U[n][nbr]['weight'] = int(random.random()*8)

    # Ecological network
    E = nx.newman_watts_strogatz_graph(n3, p3, 1)
    E.graph["Network"] = "Actors"
    # Adding id
    for i in range(len(E)):
        E.node[i]["num"] = i + 10001
        # Subnetwork
    for i in range(len(E)):
        E.node[i]["subnetwork"] = 3
        # Adding class
    for i in range(len(E)):
        E.node[i]["group"] = 2
        # Adding weight
    # 	for n,nbrs in E.adjacency_iter():
    # 		for nbr,eattr in nbrs.items():
    # 			E[n][nbr]['weight'] = 5

    # join the three subnetworks
    G = nx.disjoint_union_all([A, U, E])

    # link some actors to some users
    for i in range(0, len(G)):
        for j in range(0, len(G)):
            if i != j:
                if G.node[i]["subnetwork"] == 1 and G.node[j]["subnetwork"] == 2:
                    # if G.node[i]['group'] == 1 and G.node[j]['group'] == 2:
                    if random.random() < pa:
                        G.add_edge(i, j)

                # link some users to some patches
    for i in range(0, len(G)):
        G.node[i]["degreeold"] = G.degree(i)

    for i in range(0, len(G)):
        for j in range(0, len(G)):
            if i != j:
                # print str(j) + " " + str(G.degree(j))
                if (
                    G.node[i]["subnetwork"] == 2
                    and G.node[j]["subnetwork"] == 3
                    and G.degree(j) - G.node[j]["degreeold"] < 4
                ):
                    if G.degree(j) - G.node[j]["degreeold"] < 4:
                        if random.random() < pb:
                            G.add_edge(i, j)

                # write json formatted data
    d = json_graph.node_link_data(G)
    json.dump(d, open("/Users/Rodolphe/Dropbox/Public/d3/examples/force/force.json", "w"))

    nx.write_graphml(G, "graph.graphml")

    return G
Example #42
0
def test_empty_disjoint_union():
    nx.disjoint_union_all([])
Example #43
	def init_network_others(self, graph_name):
		# graph_name = "communities"
		if(graph_name=="SmallWorld"):
			self.Net = nx.watts_strogatz_graph(self.N, 4, 0.1, seed=seed) # small-world network
			self.position=nx.spring_layout(self.Net)
		elif(graph_name=="ScaleFree"):
			self.Net = nx.barabasi_albert_graph(self.N, 4, seed=seed) # scale-free graph
			self.position=nx.spring_layout(self.Net)	
		elif(graph_name=="completGraph"):
			self.Net = nx.complete_graph(self.N) # complete graph
			self.position=nx.spring_layout(self.Net)
		elif(graph_name=="RandomGraph"):
			self.Net = nx.gnp_random_graph(self.N, p=0.2, seed=seed)
			self.position=nx.spring_layout(self.Net)
		elif(graph_name=="communities"):
			self.Net = nx.Graph()
			Ncommunities = 5
			Sizecommunities = 500
			N_edges = 40 # 2500 nodes: total 1000 noise edges
			assert Ncommunities * Sizecommunities == self.N
			for i in range(Ncommunities):
				self.Net = nx.disjoint_union_all([self.Net, nx.barabasi_albert_graph(Sizecommunities,4, seed=seed)])
			print("#community: %d, edge number: %d" %(Ncommunities ,nx.number_of_edges(self.Net)))
			for i in range(Ncommunities):
				for j in range(N_edges):
					all_comms = set(range(Ncommunities))
					this_comm = set([i])
					another_comm = np.random.choice(list(all_comms - this_comm))
					node1 = np.random.choice(range(i%Ncommunities*Sizecommunities, i%Ncommunities*Sizecommunities+Sizecommunities)) 
					node2 = np.random.choice(range(another_comm%Ncommunities*Sizecommunities, another_comm%Ncommunities*Sizecommunities+Sizecommunities))
					self.Net.add_edge(node1, node2)
			print("#community: %d, edge number: %d" %(Ncommunities ,nx.number_of_edges(self.Net)))
			# g1 = nx.barabasi_albert_graph(self.N_A, 4, seed=seed)			
			# g2 = nx.barabasi_albert_graph(self.N_B, 4, seed=seed)
			# g3 = nx.barabasi_albert_graph(self.N_AB, 4, seed=seed)
			# # g1 = nx.complete_graph(self.N_A)
			# # g2 = nx.complete_graph(self.N_B)
			# # g3 = nx.complete_graph(self.N_AB)
			# # g3 = nx.watts_strogatz_graph(self.N_AB, 4, 0.2, seed=seed)# AB is not fully connected
			# self.Net = nx.disjoint_union_all([g1, g2, g3])
			# self.Net1 = g1
			# self.Net2 = g2
			# self.Net3 = g3
			# N_random_number = 50 # the number of random edges added to the network
			# for i in range(N_random_number):
			# 	e = np.random.choice(self.N, 2, replace=False)
			# 	self.Net.add_edge(e[0], e[1])


			self.position = nx.spring_layout(self.Net)
			# self.initPos()
			# N_12_edges = 10
			# N_13_edges = 20
			# N_23_edges = 20
			# for i in range(N_12_edges):
			# 	node1 = np.random.choice(range(0, self.N_A))
			# 	node2 = np.random.choice(range(self.N_A, self.N_A+self.N_B))
			# 	self.Net.add_edge(node1, node2)
			# for i in range(N_13_edges):
			# 	node1 = np.random.choice(range(0, self.N_A))
			# 	node2 = np.random.choice(range(self.N_A+self.N_B, self.N))
			# 	self.Net.add_edge(node1, node2)
			# for i in range(N_23_edges):
			# 	node1 = np.random.choice(range(self.N_A, self.N_A+self.N_B))
			# 	node2 = np.random.choice(range(self.N_A+self.N_B, self.N))
			# 	self.Net.add_edge(node1, node2)
		else:
			self.init_network(self)

		# State
		print("initializing states...")
		# self.comm_init_State(Ncommunities, Sizecommunities)
		for i in range(0, self.N):
			self.State[i] = np.random.choice([1,2,3])
Example #44
0
    def __init__(self, 
                list_of_layers=None, 
                inter_adjacency_matrix=None,
                **attr):
        """Constructor of a MultilayerGraph. 
        It creates a symmetric (undirected) MultilayerGraph object 
        inheriting methods from networkx.Graph
        
        Parameters:
        -----------
        list_of_layers : Python list of networkx.Graph objects
         
        inter_adjacency_matrix : a lil sparse matrix (NxN) with zero 
								 diagonal elements and off-diagonal 
								 block elements defined by the 
                                 inter-connectivity architecture.
        
        Return: a MultilayerGraph object
        
        Examples:
        ---------
        import multinetx as mx
        N = 10
		g1 = mx.erdos_renyi_graph(N,0.07,seed=218)
		g2 = mx.erdos_renyi_graph(N,0.07,seed=211)
		g3 = mx.erdos_renyi_graph(N,0.07,seed=211)
                
        adj_block = mx.lil_matrix(np.zeros((N*3,N*3)))
		adj_block[0:  N,  N:2*N] = np.identity(N)    # L_12
		adj_block[0:  N,2*N:3*N] = np.identity(N)    # L_13
		#adj_block[N:2*N,2*N:3*N] = np.identity(N)    # L_23
		adj_block += adj_block.T

		mg = mx.MultilayerGraph(list_of_layers=[g1,g2,g3], 
								inter_adjacency_matrix=adj_block)

		mg.set_edges_weights(inter_layer_edges_weight=4)
		mg.set_intra_edges_weights(layer=0,weight=1)
		mg.set_intra_edges_weights(layer=1,weight=2)
		mg.set_intra_edges_weights(layer=2,weight=3)
		
		
        """       
        ## Give an empty graph in the list_of_layers
        if list_of_layers is None:
            self.list_of_layers = [Graph()]
        else:
            self.list_of_layers = list_of_layers
        
        ## Number of layers
        self.num_layers = len(self.list_of_layers)
        
        ## Number of nodes in each layer
        self.num_nodes_in_layers = self.list_of_layers[0].number_of_nodes()       
        
        ## Create the MultilayerGraph without inter-layer links.
        try:
            Graph.__init__(self,
                        Graph(disjoint_union_all(self.list_of_layers),
                        **attr))
        except multinetxError:
            raise multinetxError("Multiplex cannot inherit Graph properly")
            
        ## Check if all graphs have the same number of nodes
        for lg in self.list_of_layers:
            try:
                assert(lg.number_of_nodes() == self.num_nodes_in_layers)
            except AssertionError:    
                raise multinetxError("Graph at layer does not have the same number of nodes")
                                     
        
        ## Make a zero lil matrix for inter_adjacency_matrix
        if inter_adjacency_matrix is None:
           inter_adjacency_matrix = \
                       lil_matrix(zeros(
                       (self.num_nodes_in_layers*self.num_layers,
                       self.num_nodes_in_layers*self.num_layers)))
        
        ## Check if the matrix inter_adjacency_matrix is lil
        try:
            assert(inter_adjacency_matrix.format == "lil")
        except AssertionError:    
            raise multinetxError("interconnecting_adjacency_matrix "\
                                 "is not scipy.sparse.lil")         
                
        ## Lists for intra-layer and inter-layer edges
        if list_of_layers is None:
            self.intra_layer_edges = []
        else:
            self.intra_layer_edges = self.edges()
        self.inter_layer_edges = []
        
        ## Inter-layer connection
        self.layers_interconnect(inter_adjacency_matrix)
    
        ## MultiNetX name
        self.name = "multilayer"
        for layer in self.list_of_layers:
            self.name += "_" + layer.name