def balanced_tree(self, vertex_count):
        maximal_children_count = random.randint(
            0, min(self.maximal_children_count, vertex_count - 1))

        t = nx.full_rary_tree(maximal_children_count, vertex_count)

        return t
Example #2
def balanced_tree(r, h, create_using=None):
    """Return the perfectly balanced r-tree of height h.
    Parameters
    ----------
    r : int
        Branching factor of the tree
    h : int
        Height of the tree
    create_using : NetworkX graph type, optional
        Use specified type to construct graph (default = networkx.Graph)
    Returns
    -------
    G : networkx Graph
        A tree with n nodes
    Notes
    -----
    This is the rooted tree where all leaves are at distance h from
    the root. The root has degree r and all other internal nodes have
    degree r+1.
    Node labels are the integers 0 (the root) up to number_of_nodes - 1.
    Also referred to as a complete r-ary tree.
    """
    # The number of nodes is n = 1 + r + ... + r^h.
    if r == 1:
        n = h + 1  # a branching factor of 1 gives a path on h + 1 nodes
    else:
        n = int((1 - r**(h + 1)) / (1 - r))  # sum of the geometric series, r != 1
    return nx.full_rary_tree(r, n, create_using)
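The docstring above relies on the geometric-series identity n = 1 + r + ... + r^h. A minimal check (my own sketch, not part of the scraped example) that balanced_tree(r, h) and full_rary_tree(r, n) agree for that n:

import networkx as nx

r, h = 3, 4
n = (r**(h + 1) - 1) // (r - 1)  # 1 + r + ... + r^h; 121 for r=3, h=4
assert nx.balanced_tree(r, h).number_of_nodes() == n
assert nx.is_isomorphic(nx.balanced_tree(r, h), nx.full_rary_tree(r, n))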
Example #3
 def test_compute_mi_fat(self):
     gold = [2.0, 1.0, 0.6666666666666667, 0.5000000000000011,
             0.2999999999999875, 0.21111111111116665]
     branches = 3
     graphs = [nx.full_rary_tree(branches, i)
               for i in range(1, 1 + len(gold))]
     smi = SMI()
     res = [smi.compute_mi(graph_to_tree(g, n_bbox=1)) for g in graphs]
     for a, b in zip(res, gold):
         self.assertAlmostEqual(a, b)
Example #4
 def test_full_rary_tree(self):
     r = 2
     n = 9
     t = nx.full_rary_tree(r, n)
     assert t.order() == n
     assert nx.is_connected(t)
     dh = nx.degree_histogram(t)
     assert dh[0] == 0  # no nodes of degree 0
     assert dh[1] == 5  # nodes of degree 1 are leaves
     assert dh[r] == 1  # root is degree r
     assert dh[r + 1] == 9 - 5 - 1  # everyone else is degree r+1
     assert len(dh) == r + 2
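The degree bookkeeping above generalizes: in full_rary_tree(r, n), node i is internal exactly when its first child r*i + 1 exists, i.e. r*i + 1 < n. A small sketch (mine, not part of the original test) recomputing the leaf count that way:

import networkx as nx

r, n = 2, 9
t = nx.full_rary_tree(r, n)
internal = sum(1 for i in range(n) if r * i + 1 < n)  # nodes that still have a child
leaves = n - internal
assert leaves == sum(1 for _, d in t.degree() if d == 1)  # 5 for r=2, n=9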
Example #5
def get_graph_by_type(graph_type: str, num_nodes: int, **kwargs) -> nx.Graph:
    if graph_type == "E-R":
        return nx.erdos_renyi_graph(n=num_nodes, **kwargs)
    elif graph_type == "Random Tree":
        return nx.random_tree(n=num_nodes, **kwargs)
    elif graph_type == "r-ary":
        return nx.full_rary_tree(n=num_nodes, **kwargs)
    elif graph_type == "Planted Partition":
        return nx.planted_partition_graph(**kwargs)
    elif graph_type == "Line":
        return nx.path_graph(n=num_nodes, **kwargs)
    elif graph_type == "Barabási–Albert":
        return nx.barabasi_albert_graph(n=num_nodes, **kwargs)
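A usage sketch (mine, with illustrative parameter values) showing how the extra keyword arguments are forwarded to each generator; the names r and p are assumptions about what a caller would pass for the r-ary and E-R cases:

tree = get_graph_by_type("r-ary", num_nodes=10, r=3)  # forwarded to nx.full_rary_tree
er = get_graph_by_type("E-R", num_nodes=10, p=0.2)    # forwarded to nx.erdos_renyi_graph
line = get_graph_by_type("Line", num_nodes=10)
assert tree.number_of_nodes() == er.number_of_nodes() == line.number_of_nodes() == 10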
Example #6
def create_graph(graph_type, n_nodes, n_branches=3):
    """
    generate graph from networkx
    :param graph_type: STAR, FAT/SNOW, PATH, CHAIN
    :param n_nodes: number of nodes
    :param n_branches: number of branches in snow case
    :return:
    """
    if graph_type == 'PATH':
        graph = nx.generators.path_graph(n_nodes)
    elif graph_type == 'FAT' or graph_type == 'SNOW':
        graph = nx.full_rary_tree(n_branches, n_nodes)
    elif graph_type == 'STAR':
        graph = nx.star_graph(n_nodes)  # note: star_graph(n) has n + 1 nodes (hub plus n leaves)
    elif graph_type == 'CHAIN':
        graph = nx.generators.path_graph(n_nodes)
        mapping = half_chain_relabel(n_nodes)
        graph = nx.relabel_nodes(graph, mapping=mapping)
    else:
        graph = None
    return graph
Example #7
def testOriginaltoCluster(n, threshold):
    G_test = nx.full_rary_tree(3,n)
    setAllNodeAttributes(G_test)
    G_cluster, G_cluster2 = buildClusteredSet(G_test, threshold)
    color_map = []
    for nodeID in G_test.nodes():
        if G_test.nodes[nodeID]['criticality'] >= threshold:
            color_map.append('red')
        else:
            color_map.append('green')
    # graph original tree
    plt.figure(1)
    #nx.draw(G_test, pos=nx.spring_layout(G_test))
    nx.draw(G_test, node_color = color_map, pos=nx.spring_layout(G_test), arrows=False, with_labels=True)
    plt.figure(2)
    pos_cluster = nx.spring_layout(G_cluster)  # compute the layout once so the edge labels line up with the drawn nodes
    nx.draw(G_cluster, pos=pos_cluster, with_labels=True)
    edge_labels = nx.get_edge_attributes(G_cluster, 'data')
    nx.draw_networkx_edge_labels(G_cluster, pos=pos_cluster, edge_labels=edge_labels)
    tree_decomp = None
    try:
        nx.find_cycle(G_cluster2)
        print("cycle was found in graph. printing tree decomposition information")
        tree_decomp = treeDecompPlayground(G_cluster2)
        #return
    except nx.exception.NetworkXNoCycle:
        print("no cycle found in graph")
        pass
    
    #print("cycle?", nx.find_cycle(G_cluster))
    f = open("make_matrix_info.txt", "w+")
    f.write("cluster dictionary:" + str(clusterDict) + "\n")
    f.write("rej node dictionary: " + str(rejectingNodeDict) + "\n")
    f.write("edge data:" + str(G_cluster.edges.data()) + "\n")
    f.write("node data:" + str(G_cluster.nodes.data()) + "\n")
    f.close()
    clearVisitedNodesAndDictionaries(G_cluster)
    makeMatrix(G_cluster2, G_cluster2.number_of_nodes())
    return G_cluster, G_cluster2, tree_decomp
Example #8
def balanced_tree(r, h, create_using=None):
    """Return the perfectly balanced r-tree of height h.

    Parameters
    ----------
    r : int
        Branching factor of the tree
    h : int
        Height of the tree
    create_using : NetworkX graph type, optional
        Use specified type to construct graph (default = networkx.Graph)

    Returns
    -------
    G : networkx Graph
        A tree with n nodes

    Notes
    -----
    This is the rooted tree where all leaves are at distance h from
    the root. The root has degree r and all other internal nodes have
    degree r+1.

    Node labels are the integers 0 (the root) up to number_of_nodes - 1.

    Also referred to as a complete r-ary tree.
    """
    # The number of nodes is n = 1 + r + ... + r^h.
    if r == 1:
        n = h + 1  # a branching factor of 1 gives a path on h + 1 nodes
    else:
        n = int((1 - r**(h + 1)) / (1 - r))  # sum of the geometric series, r != 1
    return nx.full_rary_tree(r, n, create_using)
Example #9
def gen_graph(args):
    """ Generate a graph based on command line arguments """
    graph = None
    ref = nx.DiGraph if args.directed else None
    try:
        if args.grnm:
            graph = nx.gnm_random_graph(args.n,
                                        args.m,
                                        seed=args.seed,
                                        directed=args.directed)
        elif args.grnd:
            graph = nx.random_regular_graph(args.d, args.n, seed=args.seed)
        elif args.grnp:
            graph = nx.gnp_random_graph(args.n,
                                        args.p,
                                        seed=args.seed,
                                        directed=args.directed)
        elif args.gkn:
            graph = nx.complete_graph(args.n, create_using=ref)
        elif args.gcn:
            if args.n == 0:
                raise nx.NetworkXError("n must be positive")
            graph = nx.cycle_graph(args.n, create_using=ref)
        elif args.gpn:
            graph = nx.path_graph(args.n, create_using=ref)
        elif args.trn:
            graph = nx.random_tree(args.n, seed=args.seed)
        elif args.tch:
            graph = nx.balanced_tree(args.c, args.h, create_using=ref)
        elif args.tcn:
            graph = nx.full_rary_tree(args.c, args.n, create_using=ref)
    except (nx.NetworkXError, nx.NetworkXPointlessConcept) as err:
        print('Error: %s' % err)
    return graph
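The flag-style interface is easiest to see with a stand-in for the parsed arguments. This sketch (mine, not from the original project) fakes an argparse namespace in which only the full r-ary tree option is switched on; every flag the elif chain may inspect has to exist on the namespace:

from types import SimpleNamespace

args = SimpleNamespace(
    directed=False, seed=42, n=13, m=0, d=0, p=0.0, c=3, h=0,
    grnm=False, grnd=False, grnp=False, gkn=False, gcn=False,
    gpn=False, trn=False, tch=False, tcn=True,  # tcn -> nx.full_rary_tree(c, n)
)
graph = gen_graph(args)
assert graph.number_of_nodes() == 13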
Example #10
import networkx as nx

#G = nx.full_rary_tree(3, 4)
#G = nx.full_rary_tree(9, 19)
#G = nx.full_rary_tree(9, 50)
G = nx.full_rary_tree(9, 150)
for u, v in G.edges():
    print('"' + str(u) + '" -- "' + str(v) + '"')
Example #11
 def test_full_rary_tree_empty(self):
     t = nx.full_rary_tree(0, 10)
     assert is_isomorphic(t, nx.empty_graph(10))
     t = nx.full_rary_tree(3, 0)
     assert is_isomorphic(t, nx.empty_graph(0))
Example #12
 def test_full_rary_tree_3_20(self):
     t = nx.full_rary_tree(3, 20)
     assert t.order() == 20
Example #13
 def test_full_rary_tree_balanced(self):
     t = nx.full_rary_tree(2, 15)
     th = nx.balanced_tree(2, 3)
     assert is_isomorphic(t, th)
Example #14
 def test_full_rary_tree_path(self):
     t = nx.full_rary_tree(1, 10)
     assert is_isomorphic(t, nx.path_graph(10))
Example #15
 },
 'empty_graph': {
     'name': 'Empty Graph',
     'args': ('n', ),
     'argtypes': (int, ),
     'argvals': (3, ),
     'gen': lambda n: nx.empty_graph(n),
     'description_fn': 'empty_graph(n)',
     'description': 'Returns the empty graph with n nodes and zero edges.',
 },
 'full_rary_tree': {
     'name': 'Full Rary Tree',
     'args': ('r', 'n'),
     'argtypes': (int, int),
     'argvals': (3, 3),
     'gen': lambda r, n: nx.full_rary_tree(r, n),
     'description_fn': 'full_rary_tree(r, n)',
     'description': 'Creates a full r-ary tree of n vertices.',
 },
 'ladder_graph': {
     'name': 'Ladder Graph',
     'args': ('n', ),
     'argtypes': (int, ),
     'argvals': (3, ),
     'gen': lambda n: nx.ladder_graph(n),
     'description_fn': 'ladder_graph(n)',
     'description': 'Returns the Ladder graph of length n.',
 },
 'lollipop_graph': {
     'name': 'Lollipop Graph',
     'args': ('m', 'n'),
Example #16
def generateAlternateGraph(num_clusters: int,
                           num_nodes: int,
                           weight_low: int = 0,
                           weight_high: int = 100,
                           draw=True) -> (nx.Graph, list, dict):
    """
    Generates graph given number of clusters and nodes
    Args:
        num_clusters: Number of clusters
        num_nodes: Number of nodes
        weight_low: Lowest possible weight for edge in graph
        weight_high: Highest possible weight for edge in graph
        draw: Whether or not to show graph (True indicates to show)

    Returns:
        Graph with nodes in clusters, array of clusters, graph position for drawing
    """
    node_colors = np.arange(0, num_nodes, 1, np.uint8)  # Stores color of nodes
    total_nodes = 0
    remainder = num_nodes % num_clusters
    clusters = []  # Stores nodes in each cluster
    # organize number of nodes per cluster and assign node colors
    temp = 0
    # fill in cluster and temp cluster variables and set up node_colors variable
    for x in range(num_clusters):
        if remainder > x:
            nodes_per_cluster = int(num_nodes / num_clusters) + 1
        else:
            nodes_per_cluster = int(num_nodes / num_clusters)

        node_colors[temp + np.arange(nodes_per_cluster)] = x
        temp += nodes_per_cluster
        clusters.append(list(np.arange(nodes_per_cluster) + total_nodes))
        total_nodes += nodes_per_cluster
    G = nx.Graph()
    cluster_endpoints = []

    # create first cluster
    cluster = nx.full_rary_tree(int(np.log2(len(clusters[0]))),
                                len(clusters[0]))

    temp = 0  # variable used to ensure diameter is as small as possible
    while nx.diameter(cluster) > (np.log2(len(clusters[0])) + temp):
        cluster = nx.full_rary_tree(int(np.log2(len(clusters[0]))),
                                    len(clusters[0]))
        temp += 1
    nx.set_node_attributes(cluster, 0, 'cluster')

    # set initial edge weight of first cluster
    for (u, v) in cluster.edges():
        cluster.edges[u, v]['weight'] = np.random.normal(100, 25)

    inner_cluster_edges = np.random.randint(0, len(
        clusters[0]), (int(np.log2(len(clusters[0]))), 2))

    # add edge weights to new edges of first cluster
    inner_cluster_edges = [(u, v, np.random.normal(100, 25))
                           for u, v in inner_cluster_edges]
    cluster.add_weighted_edges_from(inner_cluster_edges)

    G = nx.disjoint_union(G, cluster)

    # create other clusters
    for i in range(1, num_clusters):
        # create cluster
        cluster = nx.full_rary_tree(int(np.log2(len(clusters[i]))),
                                    len(clusters[i]))
        temp = 0
        while nx.diameter(cluster) > (np.log2(len(clusters[i])) + temp):
            cluster = nx.full_rary_tree(int(np.log2(len(clusters[i]))),
                                        len(clusters[i]))
            temp += 1

        nx.set_node_attributes(cluster, i, 'cluster')

        # set initial edge weights (both branches of the original conditional, which
        # also read a stale loop index, assigned the same distribution, so a single
        # assignment is equivalent)
        for (u, v) in cluster.edges():
            cluster.edges[u, v]['weight'] = np.random.normal(100, 10)

        G = nx.disjoint_union(G, cluster)

        # add connections from new clusters to first cluster
        cluster_endpoint = np.random.randint(0, len(clusters[0]))
        cluster_endpoints.append(cluster_endpoint)
        G.add_edge(cluster_endpoint,
                   np.random.choice(clusters[i][(len(clusters[i]) // 2):]),
                   weight=np.random.normal(100, 10))

    # adding inter and inner edges of the clusters
    closest_length = 1000
    nearest_cluster = 0
    shortest_path = 0
    for i in range(1, num_clusters):
        # check for closest cluster besides main cluster
        for x in range(2, num_clusters - 1):
            shortest_path = nx.shortest_path_length(G,
                                                    cluster_endpoints[i - 1],
                                                    cluster_endpoints[x - 1])
            if shortest_path < closest_length:
                closest_length = shortest_path
                nearest_cluster = x

        # add inner_cluster_edges
        # get two random points inside a cluster
        inner_cluster_edges = np.random.randint(
            clusters[i][0], clusters[i][-1] + 1,
            (int(np.log2(len(clusters[i]))), 2))
        inner_cluster_edges = [(u, v, np.random.normal(100, 10))
                               for u, v in inner_cluster_edges]
        # cluster.add_weighted_edges_from(inner_cluster_edges)
        G.add_weighted_edges_from(inner_cluster_edges)

        # if the nearest_cluster is too far away, don't add inter-cluster edges
        if shortest_path > (np.random.randint(np.log2(len(clusters[i])),
                                              np.log2(len(clusters[i])) + 1)):
            continue

        # add inter_cluster_edges
        inter_cluster_edges = np.random.randint(
            clusters[i][len(clusters[i]) // 2], clusters[i][-1] + 1, (int(
                len(clusters[i]) /
                (np.random.randint(0, (np.log2(len(clusters[i])))) + 1))))
        inter_cluster_edges = [[
            y,
            np.random.randint(clusters[nearest_cluster][len(clusters[i]) // 2],
                              clusters[nearest_cluster][-1] + 1),
            np.random.normal(100, 10)
        ] for y in inter_cluster_edges]

        # cluster.add_weighted_edges_from(inner_cluster_edges)
        G.add_weighted_edges_from(inter_cluster_edges)
    G.remove_edges_from(
        nx.selfloop_edges(G))  # Remove self-loops caused by adding random edge

    pos = nx.spring_layout(G)

    # Draw graph
    if draw:
        nx.draw_networkx_nodes(G, pos, node_color=node_colors)
        nx.draw_networkx_labels(G, pos)
        # nx.draw_networkx_edge_labels(G, pos)
        nx.draw_networkx_edges(G, pos, G.edges())
        plt.draw()
        plt.show()

    return G, clusters, pos
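A minimal usage sketch (mine; it assumes the function above and its numpy/networkx imports are already in scope) for exercising the generator without opening a plot window:

G, clusters, pos = generateAlternateGraph(num_clusters=4, num_nodes=40, draw=False)
assert G.number_of_nodes() == 40
assert sum(len(c) for c in clusters) == 40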
Example #17
def test_is_aperiodic_rary_tree():
    G = nx.full_rary_tree(3, 27, create_using=nx.DiGraph())
    assert not nx.is_aperiodic(G)
Example #18
fig = plt.figure()
nx.draw(G, with_labels=True)
fig.text(0.02, 0.95, "cycle graph", fontweight='bold')
fig.text(0.02, 0.90, "n(node count) = " + str(n))
plt.show()

#dorogovtsev_goltsev_mendes_graph
G = nx.dorogovtsev_goltsev_mendes_graph(3)
fig = plt.figure()
nx.draw(G, with_labels=True)
fig.text(0.02, 0.95, "dorogovtsev goltsev mendes graph", fontweight='bold')
fig.text(0.02, 0.90, "n(node count) = 3")
plt.show()

#full r-ary tree
G = nx.full_rary_tree(3, 7)
fig = plt.figure()
nx.draw(G, with_labels=True)
fig.text(0.02, 0.95, "full r-ary tree", fontweight='bold')
fig.text(0.02, 0.90, "n(node count) = 7, with branching factor 3")
plt.show()

#ladder graph
G = nx.ladder_graph(n)
fig = plt.figure()
nx.draw(G, with_labels=True)
fig.text(0.02, 0.95, "ladder graph", fontweight='bold')
fig.text(0.02, 0.90, "n(ladder count) = " + str(n))
plt.show()

#lollipop graph
Example #19
import networkx as nx
import random

NUM_NODES = 10
nwrk = nx.full_rary_tree(3, NUM_NODES)
MSG_SIZE = 10
STARTING_NODE = 3
DROP_PROB = 0.1

###############
# LOAD DATA
###############
for i in range(MSG_SIZE):
    nwrk.nodes[STARTING_NODE][str(i)] = ''

start_node = nwrk

i = 0
done = False
while not done:
    i += 1
    if i % 10 == 0:
        print("Iteration:", i)
    for node_tuple in nwrk.nodes(data=True):
        node_dict = node_tuple[1]
        if len(node_dict) == 0:
            continue
        else:
            #if "curr" not in node_dict.keys():
            for dp in node_dict.keys():
                for neighbors in nwrk.adj[node_tuple[0]]:
Example #20
 def test_basic(self):
     """Tests for joining multiple subtrees at a root node."""
     trees = [(nx.full_rary_tree(2, 2 ** 2 - 1), 0) for i in range(2)]
     actual = nx.join(trees)
     expected = nx.full_rary_tree(2, 2 ** 3 - 1)
     assert_true(nx.is_isomorphic(actual, expected))
Example #21
    fileObject.write('\n')
    fileObject.close()


def ContractDict(dir, G):
    with open(dir, 'r') as f:  # open for reading; append mode ('a') would leave nothing to iterate
        for line in f:
            line1 = line.split()
            G.add_edge(int(line1[0]), int(line1[1]))
    for edge in G.edges:
        G.add_edge(edge[0], edge[1], weight=1)
        # G.add_edge(edge[0],edge[1],weight=effectDistance(randomnum))
    print(len(list(G.nodes)))
    # G.remove_node(0)
    print(len(list(G.nodes)))
    return G


G = nx.full_rary_tree(6, 5000)  # generate a full 6-ary tree with the given number of nodes

# G = nx.fast_gnp_random_graph(5000, p=0.05)  # generate a random graph

# G = nx.random_graphs.barabasi_albert_graph(500, 2)  # generate a scale-free graph

# G=nx.random_powerlaw_tree(2000)
print('is connected?', nx.is_connected(G))
Gc = max(nx.connected_component_subgraphs(G), key=len)

for edge in Gc.edges():
    listToTxt(edge, '6regular_tree_5000.txt')
Example #22
def test_hierarchy_tree():
    G = nx.full_rary_tree(2, 16, create_using=nx.DiGraph())
    assert nx.flow_hierarchy(G) == 1.0
Example #23
'''

misdatos = pd.DataFrame()

sources = [2,3,4,5,6]
sinks = [4,2,3,1,0]
for nodes in range(5):
    for k in range(10):
        for l in range(3,7):
            l = 2 ** l
            #print(l)

            # FIRST GENERATOR

            tiempo = time()
            F = nx.full_rary_tree(2, l)
            w = norm.rvs(10.0, 0.5, nx.number_of_edges(F))
            mu, std = norm.fit(w)
            m = 0
            for u,v,d in F.edges(data=True):
                d['weight'] = w[m]
                m += 1
            tiempo = time() - tiempo

            tiempoalgo = time()
            for a in range(5):
                flow_value = maximum_flow_value(F, sources[nodes], sinks[nodes], capacity='weight')
            tiempoalgo = time() - tiempoalgo + tiempo

            row = pd.DataFrame({'Generador': ['Árbol lleno r-ario'], 'Algoritmo': ['Valor de flujo máximo'],
                                'Orden': len(F),'Densidad': F.size()/nx.complete_graph(l).size(),
Example #24
 def test_encoding(self):
     T = nx.full_rary_tree(2, 2**3 - 1)
     expected = (((), ()), ((), ()))
     actual = nx.to_nested_tuple(T, 0)
     assert_nodes_equal(expected, actual)
Example #25
def create(args):
    ### load datasets
    graphs = []
    # synthetic graphs
    if args.graph_type == 'ladder':
        graphs = []
        for i in range(100, 201):
            graphs.append(nx.ladder_graph(i))
        args.max_prev_node = 10
    elif args.graph_type == 'ladder_small':
        graphs = []
        for i in range(2, 11):
            graphs.append(nx.ladder_graph(i))
        args.max_prev_node = 10
    elif args.graph_type == 'ladder_extra':
        graphs = []
        for i in range(1000):
            # 50 nodes in all graphs
            graphs.append(ladder_extra(6, 10))

        # Have to see what max_prev nodes is
        args.max_prev_node = 28  # Just for 6,10
        return graphs
    elif args.graph_type == 'ladder_extra_circular':
        graphs = []
        for i in range(1000):
            # 50 nodes in all graphs
            graphs.append(ladder_extra_circular(6, 10))

        # Have to see what max_prev nodes is!!
        args.max_prev_node = 28  # Just for 6,10
        return graphs
    elif args.graph_type == 'ladder_extra_full_circular':
        graphs = []
        for i in range(1000):
            # 50 nodes in all graphs
            graphs.append(ladder_extra_full_circular(6, 10))

        # Have to see what max_prev nodes is
        args.max_prev_node = 28  # Just for 6,10
        return graphs
    elif args.graph_type.startswith('layer_tree'):
        graphs = []
        width = 6
        branch = 3
        height_indx = args.graph_type.rfind('_') + 1
        height = int(args.graph_type[height_indx:])
        for i in range(1000):
            G = layered_tree(width, height, branch_factor=branch)
            graphs.append(G)

        # Max prev nodes????
        args.max_prev_node = 31  # width = 6, branch = 3
        return graphs
    elif args.graph_type.startswith('ladder_tree'):
        graphs = []
        width = 6
        branch = 2
        height_indx = args.graph_type.rfind('_') + 1
        height = int(args.graph_type[height_indx:])
        for i in range(1000):
            G = ladder_tree(width, height, branch_factor=branch)
            graphs.append(G)

        args.max_prev_node = 28  # width = 6, branch = 2
        return graphs
    elif args.graph_type.startswith('random'):
        indx_degree = int(args.graph_type.find('_')) + 1
        indx_nodes = int(args.graph_type.find('_', indx_degree))

        degree = int(args.graph_type[indx_degree:indx_nodes])
        nodes = int(args.graph_type[indx_nodes + 1:])

        graphs = []
        for i in range(1000):
            graphs.append(nx.random_regular_graph(degree, nodes))

        # Note we are only using these for testing so shouldn't need this
        return graphs
    elif args.graph_type == 'tree':
        print('Creating tree graphs')
        graphs = []
        for i in range(2, 5):
            for j in range(3, 5):
                graphs.append(nx.balanced_tree(i, j))
        args.max_prev_node = 256
    elif args.graph_type == 'tree_adversarial':
        graphs = []
        # Trees that have not been seen before
        # in the standard 'trees' dataset

        # The first set includes trees
        # that have different heights than
        # those in the training data
        heights = [2, 5, 6]
        for i in range(2, 5):
            for h in heights:
                graphs.append(nx.balanced_tree(i, h))

        args.max_prev_node = 256
        return graphs
    elif args.graph_type.startswith('tree_r_edge'):
        num_edges_removed = int(args.graph_type[-1])
        # Generate full balanced trees
        print('here')
        for i in range(2, 5):
            for j in range(3, 5):
                graph = nx.balanced_tree(i, j)
                # Get the edges so we can randomly
                # remove num_edges_removed edges
                for x in range(num_edges_removed):
                    edges = list(graph.edges())  # materialize so a random edge can be indexed
                    edge = np.random.randint(len(edges))
                    graph.remove_edge(edges[edge][0], edges[edge][1])

                graphs.append(graph)

        args.max_prev_node = 256
        return graphs
    elif args.graph_type.startswith('rary-tree'):
        # Generate all r-ary trees for a given
        # r and height of tree.
        r = int(args.graph_type[-1])  # the trailing character encodes the branching factor
        h = 4
        graphs = []
        for n in range(1, 2**h):
            graphs.append(nx.full_rary_tree(r, n))

        args.max_prev_node = 256  # this doesn't matter much here
        return graphs
    elif args.graph_type.startswith('tree_r_node'):
        # Remove n nodes from the graph
        n = int(args.graph_type[-1])
        graphs = []
        for i in range(2, 5):
            for j in range(3, 5):
                graph = nx.balanced_tree(i, j)

                for x in range(n):
                    nodes = list(graph.nodes())  # materialize so a random node can be indexed
                    node = np.random.randint(len(nodes))
                    graph.remove_node(nodes[node])

                graphs.append(graph)

        args.max_prev_node = 256
        return graphs
    elif args.graph_type == 'caveman':
        # graphs = []
        # for i in range(5,10):
        #     for j in range(5,25):
        #         for k in range(5):
        #             graphs.append(nx.relaxed_caveman_graph(i, j, p=0.1))
        graphs = []
        for i in range(2, 3):
            for j in range(30, 81):
                for k in range(10):
                    graphs.append(caveman_special(i, j, p_edge=0.3))
        args.max_prev_node = 100
    elif args.graph_type == 'caveman_small':
        # graphs = []
        # for i in range(2,5):
        #     for j in range(2,6):
        #         for k in range(10):
        #             graphs.append(nx.relaxed_caveman_graph(i, j, p=0.1))
        graphs = []
        for i in range(2, 3):
            for j in range(6, 11):
                for k in range(20):
                    graphs.append(caveman_special(i, j,
                                                  p_edge=0.8))  # default 0.8
        args.max_prev_node = 20
    elif args.graph_type == 'caveman_small_single':
        # graphs = []
        # for i in range(2,5):
        #     for j in range(2,6):
        #         for k in range(10):
        #             graphs.append(nx.relaxed_caveman_graph(i, j, p=0.1))
        graphs = []
        for i in range(2, 3):
            for j in range(8, 9):
                for k in range(100):
                    graphs.append(caveman_special(i, j, p_edge=0.5))
        args.max_prev_node = 20
    elif args.graph_type.startswith('community'):
        num_communities = int(args.graph_type[-1])
        print('Creating dataset with ', num_communities, ' communities')
        c_sizes = np.random.choice([12, 13, 14, 15, 16, 17], num_communities)
        #c_sizes = [15] * num_communities
        for k in range(3000):
            graphs.append(n_community(c_sizes, p_inter=0.01))
        args.max_prev_node = 80
    elif args.graph_type == 'grid':
        graphs = []
        for i in range(10, 20):
            for j in range(10, 20):
                graphs.append(nx.grid_2d_graph(i, j))
        args.max_prev_node = 40
    elif args.graph_type == 'grid_small':
        graphs = []
        for i in range(2, 5):
            for j in range(2, 6):
                graphs.append(nx.grid_2d_graph(i, j))
        args.max_prev_node = 15
    elif args.graph_type == 'barabasi':
        graphs = []
        for i in range(100, 200):
            for j in range(4, 5):
                for k in range(5):
                    graphs.append(nx.barabasi_albert_graph(i, j))
        args.max_prev_node = 130
    elif args.graph_type == 'barabasi_small':
        graphs = []
        for i in range(4, 21):
            for j in range(3, 4):
                for k in range(10):
                    graphs.append(nx.barabasi_albert_graph(i, j))
        args.max_prev_node = 20
    elif args.graph_type == 'grid_big':
        graphs = []
        for i in range(36, 46):
            for j in range(36, 46):
                graphs.append(nx.grid_2d_graph(i, j))
        args.max_prev_node = 90

    elif 'barabasi_noise' in args.graph_type:
        graphs = []
        for i in range(100, 101):
            for j in range(4, 5):
                for k in range(500):
                    graphs.append(nx.barabasi_albert_graph(i, j))
        graphs = perturb_new(graphs, p=args.noise / 10.0)
        args.max_prev_node = 99

    # real graphs
    elif args.graph_type == 'enzymes':
        graphs = Graph_load_batch(min_num_nodes=10,
                                  name='ENZYMES',
                                  node_attributes=True,
                                  node_labels=True)
        args.max_prev_node = 25
    elif args.graph_type == 'enzymes_small':
        graphs_raw = Graph_load_batch(min_num_nodes=10, name='ENZYMES')
        graphs = []
        for G in graphs_raw:
            if G.number_of_nodes() <= 20:
                graphs.append(G)
        args.max_prev_node = 15
    elif args.graph_type.startswith('enzymes'):
        graph_label = int(args.graph_type[-1])
        graphs = Graph_load_label(min_num_nodes=10,
                                  name='ENZYMES',
                                  node_attributes=True,
                                  node_labels=True,
                                  graph_label=graph_label)
        args.max_prev_node = 25

    elif args.graph_type == 'protein':
        graphs = Graph_load_batch(min_num_nodes=20, name='PROTEINS_full')
        args.max_prev_node = 80

    elif args.graph_type == 'DD':
        graphs = Graph_load_batch(min_num_nodes=100,
                                  max_num_nodes=500,
                                  name='DD',
                                  node_attributes=False,
                                  node_labels=True)
        args.max_prev_node = 230
    elif args.graph_type.startswith('DD'):
        graph_label = int(args.graph_type[-1])
        graphs = Graph_load_label(min_num_nodes=100,
                                  max_num_nodes=500,
                                  name='DD',
                                  node_attributes=False,
                                  node_labels=True,
                                  graph_label=graph_label)
        args.max_prev_node = 230

    elif args.graph_type == 'AIDS':  # Definitely check! Maybe train on inactive and test on active so train on 1!
        # Gotta calc this!
        graphs = Graph_load_batch(min_num_nodes=2,
                                  max_num_nodes=100,
                                  name='AIDS',
                                  node_attributes=False,
                                  node_labels=True)
        args.max_prev_node = 16
    elif args.graph_type.startswith('AIDS'):
        graph_label = int(args.graph_type[-1])
        graphs = Graph_load_label(min_num_nodes=2,
                                  max_num_nodes=100,
                                  name='AIDS',
                                  node_attributes=False,
                                  node_labels=True,
                                  graph_label=graph_label)
        args.max_prev_node = 16

    elif args.graph_type == 'Fingerprint':  # Not so great to train on because the graph classes include several graph labels
        # Gotta calc this!
        graphs = Graph_load_batch(min_num_nodes=2,
                                  max_num_nodes=26,
                                  name='Fingerprint',
                                  node_attributes=True,
                                  node_labels=False)
        args.max_prev_node = 6
    elif args.graph_type.startswith('Fingerprint'):
        graph_label = int(args.graph_type[-1])
        graphs = Graph_load_label(min_num_nodes=2,
                                  max_num_nodes=26,
                                  name='Fingerprint',
                                  node_attributes=True,
                                  node_labels=False,
                                  graph_label=graph_label)
        args.max_prev_node = 6

    elif args.graph_type == 'COLLAB':  # Interesting to try but may be quite slow
        # Gotta calc this!
        graphs = Graph_load_batch(min_num_nodes=32,
                                  max_num_nodes=492,
                                  name='COLLAB',
                                  node_attributes=False,
                                  node_labels=False)
        args.max_prev_node = 480
    elif args.graph_type.startswith('COLLAB'):
        graph_label = int(args.graph_type[-1])
        graphs = Graph_load_label(min_num_nodes=32,
                                  max_num_nodes=492,
                                  name='COLLAB',
                                  node_attributes=False,
                                  node_labels=False,
                                  graph_label=graph_label)
        args.max_prev_node = 480

    elif args.graph_type == 'IMDB-MULTI':  #Definitely try!!
        # Gotta calc this!
        graphs = Graph_load_batch(min_num_nodes=7,
                                  max_num_nodes=89,
                                  name='IMDB-MULTI',
                                  node_attributes=False,
                                  node_labels=False)
        args.max_prev_node = 86
    elif args.graph_type.startswith('IMDB-MULTI'):
        graph_label = int(args.graph_type[-1])
        graphs = Graph_load_label(min_num_nodes=7,
                                  max_num_nodes=89,
                                  name='IMDB-MULTI',
                                  node_attributes=False,
                                  node_labels=False,
                                  graph_label=graph_label)
        args.max_prev_node = 86

    elif args.graph_type == 'REDDIT-MULTI-12K':  # Not done yet may be too large to really try, Maybe want to limit max
        # Gotta calc this! # Try max = 500
        graphs = Graph_load_batch(min_num_nodes=2,
                                  max_num_nodes=500,
                                  name='REDDIT-MULTI-12K',
                                  node_attributes=False,
                                  node_labels=False)
        args.max_prev_node = 3061
    elif args.graph_type.startswith('REDDIT-MULTI-12K'):
        graph_label = int(args.graph_type[-1])
        graphs = Graph_load_label(min_num_nodes=2,
                                  max_num_nodes=3782,
                                  name='REDDIT-MULTI-12K',
                                  node_attributes=False,
                                  node_labels=False,
                                  graph_label=graph_label)
        args.max_prev_node = 3061

    elif args.graph_type == 'Letter-high':  # Could be quite interesting to look at. For example train on low distortion and test on med/high and diff letters
        # Gotta calc this!
        graphs = Graph_load_batch(min_num_nodes=2,
                                  max_num_nodes=9,
                                  name='Letter-high',
                                  node_attributes=True,
                                  node_labels=False)
        args.max_prev_node = 6
    elif args.graph_type.startswith('Letter-high'):
        graph_label = int(args.graph_type[-1])
        graphs = Graph_load_label(min_num_nodes=2,
                                  max_num_nodes=9,
                                  name='Letter-high',
                                  node_attributes=True,
                                  node_labels=False,
                                  graph_label=graph_label)
        args.max_prev_node = 6

    elif args.graph_type == 'Letter-med':
        # Gotta calc this!
        graphs = Graph_load_batch(min_num_nodes=2,
                                  max_num_nodes=9,
                                  name='Letter-med',
                                  node_attributes=True,
                                  node_labels=False)
        args.max_prev_node = 5
    elif args.graph_type.startswith('Letter-med'):
        graph_label = int(args.graph_type[-1])
        graphs = Graph_load_label(min_num_nodes=2,
                                  max_num_nodes=9,
                                  name='Letter-med',
                                  node_attributes=True,
                                  node_labels=False,
                                  graph_label=graph_label)
        args.max_prev_node = 5

    elif args.graph_type == 'Letter-low':  # For a specific letter may want to try for example the letter N
        # Gotta calc this!
        graphs = Graph_load_batch(min_num_nodes=2,
                                  max_num_nodes=8,
                                  name='Letter-low',
                                  node_attributes=True,
                                  node_labels=False)
        args.max_prev_node = 5
    elif args.graph_type.startswith('Letter-low'):
        graph_label = int(args.graph_type[-1])
        graphs = Graph_load_label(min_num_nodes=2,
                                  max_num_nodes=8,
                                  name='Letter-low',
                                  node_attributes=True,
                                  node_labels=False,
                                  graph_label=graph_label)
        args.max_prev_node = 5

    elif args.graph_type == 'citeseer':
        _, _, G = Graph_load(dataset='citeseer')
        G = max(nx.connected_component_subgraphs(G), key=len)
        G = nx.convert_node_labels_to_integers(G)
        graphs = []
        for i in range(G.number_of_nodes()):
            G_ego = nx.ego_graph(G, i, radius=3)
            if G_ego.number_of_nodes() >= 50 and (G_ego.number_of_nodes() <=
                                                  400):
                graphs.append(G_ego)
        args.max_prev_node = 250
    elif args.graph_type == 'citeseer_small':
        _, _, G = Graph_load(dataset='citeseer')
        G = max(nx.connected_component_subgraphs(G), key=len)
        G = nx.convert_node_labels_to_integers(G)
        graphs = []
        for i in range(G.number_of_nodes()):
            G_ego = nx.ego_graph(G, i, radius=1)
            if (G_ego.number_of_nodes() >= 4) and (G_ego.number_of_nodes() <=
                                                   20):
                graphs.append(G_ego)
        shuffle(graphs)
        graphs = graphs[0:200]
        args.max_prev_node = 15

    return graphs
Example #26
 def test_decoding(self):
     balanced = (((), ()), ((), ()))
     expected = nx.full_rary_tree(2, 2**3 - 1)
     actual = nx.from_nested_tuple(balanced)
     assert nx.is_isomorphic(expected, actual)
Example #27
def test_hierarchy_tree():
    G = nx.full_rary_tree(2, 16, create_using=nx.DiGraph())
    assert_equal(nx.flow_hierarchy(G), 1.0)
Example #28
TIMESTEPS = 100
dt = SOIL_DEPTH / TIMESTEPS

# Create graphs
#g_complete = nx.complete_graph(N_NODES); g_complete.name = 'complete'
g_random = nx.gnm_random_graph(n=N_NODES, m=N_EDGES)
g_random.name = 'random'
PREFERENTIAL_NEW_EDGES = int(
    N_EDGES / N_NODES
)  # gives roughly the same number of edges as the random graph when N_EDGES is a multiple of N_NODES
g_barabasi_albert = nx.barabasi_albert_graph(n=N_NODES,
                                             m=PREFERENTIAL_NEW_EDGES)
g_barabasi_albert.name = 'preferential attachment'
R_ARY = 2  # Number of children of each node for the tree
g_rary_tree = nx.full_rary_tree(n=N_NODES, r=R_ARY)
g_rary_tree.name = 'full tree'
g_random_tree = nx.random_tree(n=N_NODES)
g_random_tree.name = 'random tree'

graphs = tuple([g_random, g_barabasi_albert, g_rary_tree, g_random_tree])
# Add properties to nodes: depth, o2.
surface_nodes = {}
graph_list = [(i, graph.name) for i, graph in enumerate(graphs)]

for graph in graphs:
    initialize_node_properties(graph)
    # Compute surface nodes
    surface_nodes[graph.name] = tuple([
        node for node, attr in graph.nodes(data=True)
        if attr['depth'] < SOIL_DEPTH / 100.
Example #29
class TestSimulatedAnnealing(unittest.TestCase):

    # Create a valid J, h and offset
    J_random = np.random.rand(200, 200)
    J_not_symmetric = J_random - np.diag(np.diag(J_random))
    J_valid = (J_not_symmetric + J_not_symmetric.T) / 2

    h = np.random.rand(5)
    h_valid = np.random.rand(len(J_valid[0]))

    offset_valid = np.random.rand() * 1000

    # Create a valid temperature function
    t = Variable("t", float)
    temp_t_valid = t**3 + 8 * t + 1
    #     temp_t_valid = 2 * (1 - t) +  0.01 * t

    # Invalid temp_t
    t = Variable("p", float)
    temp_t_invalid_1 = 31 * t**2 + 7

    # Another invalid temp_t
    t = Variable("t", float)
    q = Variable("q", float)
    temp_t_invalid_2 = 15 * t**2 - 8 * q

    # And for the number of steps
    n_steps_invalid = -200
    n_steps_valid = 5000

    # Create an actual problem
    problem = MaxCut(nx.full_rary_tree(5, 77))

    # Prepare a Job for this problem
    job_valid = problem.to_job()

    def test_creation(self):
        # Check that no exception is raised
        qpu = SimulatedAnnealing(temp_t=TestSimulatedAnnealing.temp_t_valid,
                                 n_steps=TestSimulatedAnnealing.n_steps_valid)

        # Now check if the proper exceptions are raised - temp_t
        with pytest.raises(ValueError):
            assert qpu == SimulatedAnnealing(
                temp_t=None, n_steps=TestSimulatedAnnealing.n_steps_valid)
        with pytest.raises(TypeError):
            assert qpu == SimulatedAnnealing(
                temp_t=TestSimulatedAnnealing.temp_t_invalid_1,
                n_steps=TestSimulatedAnnealing.n_steps_valid)
        with pytest.raises(TypeError):
            assert qpu == SimulatedAnnealing(
                temp_t=TestSimulatedAnnealing.temp_t_invalid_2,
                n_steps=TestSimulatedAnnealing.n_steps_valid)

        # Check the exceptions for the number of annealing steps
        with pytest.raises(ValueError):
            assert qpu == SimulatedAnnealing(
                temp_t=TestSimulatedAnnealing.temp_t_valid, n_steps=None)
        with pytest.raises(ValueError):
            assert qpu == SimulatedAnnealing(
                temp_t=TestSimulatedAnnealing.temp_t_valid,
                n_steps=TestSimulatedAnnealing.n_steps_invalid)

        # And a check for the exception raised by negative number of seeds
        with pytest.raises(ValueError):
            assert qpu == SimulatedAnnealing(
                temp_t=TestSimulatedAnnealing.temp_t_valid,
                n_steps=TestSimulatedAnnealing.n_steps_valid,
                seed=-1298)

    def test_submit_job(self):

        # Create a valid qpu
        qpu_valid = SimulatedAnnealing(
            temp_t=TestSimulatedAnnealing.temp_t_valid,
            n_steps=TestSimulatedAnnealing.n_steps_valid,
            seed=8017)

        # Create an Observable Job and check that such Jobs are not dealt with by the qpu
        observable = Observable(5)
        job = Job(observable=observable)
        with pytest.raises(exceptions_types.QPUException):
            qpu_valid.submit_job(job)  # 'result' is not defined yet here; just let the call raise

        # Create a circuit Job and check that such Jobs are not dealt with by the qpu
        from qat.lang.AQASM import Program, H
        prog = Program()
        reg = prog.qalloc(1)
        prog.apply(H, reg)
        prog.reset(reg)
        with pytest.raises(exceptions_types.QPUException):
            qpu_valid.submit(prog.to_circ().to_job(nbshots=1))  # just let the call raise

        # Create a Job from a Schedule with empty drive and check that such
        # Jobs are not dealt with by the qpu
        schedule = Schedule()
        job = Job(schedule=schedule)
        with pytest.raises(exceptions_types.QPUException):
            qpu_valid.submit_job(job)  # just let the call raise

        # Create a job from a Schedule with a drive with more than one observable
        # or an observable with coefficient not 1 to check that such Jobs don't work
        # with the qpu
        observable = get_observable(TestSimulatedAnnealing.J_valid,
                                    TestSimulatedAnnealing.h_valid,
                                    TestSimulatedAnnealing.offset_valid)
        drive_invalid_1 = [(1, observable), (1, observable)]
        schedule = Schedule(drive=drive_invalid_1)
        job = schedule.to_job()
        with pytest.raises(exceptions_types.QPUException):
            qpu_valid.submit_job(job)  # just let the call raise
        drive_invalid_2 = [(5, observable)]
        schedule = Schedule(drive=drive_invalid_2)
        job = schedule.to_job()
        with pytest.raises(exceptions_types.QPUException):
            qpu_valid.submit_job(job)  # just let the call raise

        # Solve the problem and check that the returned result is Result
        result = qpu_valid.submit_job(TestSimulatedAnnealing.job_valid)
        assert isinstance(result, Result)

    def test_spins_to_integer_and_back_translations(self):
        """
        Tests if the respective two methods in service.py translate
        backwards and forwards properly
        """

        random_spin_config = np.random.randint(2, size=10) * 2 - 1
        random_spin_config_size = len(random_spin_config)
        random_spin_config_to_int = spins_to_integer(random_spin_config)
        translated_spin_config = integer_to_spins(random_spin_config_to_int,
                                                  random_spin_config_size)
        translated_int = spins_to_integer(translated_spin_config)

        assert np.array_equal(random_spin_config, translated_spin_config)
        assert random_spin_config_to_int == translated_int

    def test_extract_j_and_h_from_obs(self):
        """
        Tests if the J coupling and h magnetic field are properly
        extracted from an observable.
        First creates an observable for some given J and h, then
        extracts the J and h, then compares them with the initial ones.
        """

        # Create an Observable and use the tested method to get back J, h and the offset
        observable = get_observable(TestSimulatedAnnealing.J_valid,
                                    TestSimulatedAnnealing.h_valid,
                                    TestSimulatedAnnealing.offset_valid)
        J_extracted, h_extracted, offset_extracted = extract_j_and_h_from_obs(
            observable)

        assert np.array_equal(TestSimulatedAnnealing.J_valid, J_extracted)
        assert np.array_equal(TestSimulatedAnnealing.h_valid, h_extracted)
        assert TestSimulatedAnnealing.offset_valid == offset_extracted
Example #30
def test_is_aperiodic_rary_tree():
    G = nx.full_rary_tree(3, 27, create_using=nx.DiGraph())
    assert_false(nx.is_aperiodic(G))
Example #31
 def test_decoding(self):
     balanced = (((), ()), ((), ()))
     expected = nx.full_rary_tree(2, 2 ** 3 - 1)
     actual = nx.from_nested_tuple(balanced)
     assert_true(nx.is_isomorphic(expected, actual))
Example #32
 def test_basic(self):
     """Tests for joining multiple subtrees at a root node."""
     trees = [(nx.full_rary_tree(2, 2**2 - 1), 0) for i in range(2)]
     actual = nx.join(trees)
     expected = nx.full_rary_tree(2, 2**3 - 1)
     assert nx.is_isomorphic(actual, expected)
Example #33
 def test_encoding(self):
     T = nx.full_rary_tree(2, 2 ** 3 - 1)
     expected = (((), ()), ((), ()))
     actual = nx.to_nested_tuple(T, 0)
     assert_equal(expected, actual)
Example #34
 def test_result_full_5ary_tree_4_tall(self):
     assert (calc_and_compare(NX.full_rary_tree(5, 4)))
Example #35
def gen_graph(tree_size, path_len):
    T = nx.full_rary_tree(2, tree_size)
    P = nx.path_graph(path_len)
    return nx.convert_node_labels_to_integers(nx.cartesian_product(T, P))
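A quick usage note (my own sketch): the Cartesian product multiplies node counts, so the result has tree_size * path_len nodes and stays connected:

import networkx as nx

G = gen_graph(tree_size=7, path_len=3)
assert G.number_of_nodes() == 7 * 3  # one copy of the 3-node path per tree node
assert nx.is_connected(G)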