Example #1
def iso_graphs(version=2):
    """Return the list of all graphs with up to seven nodes named in the Graph Atlas."""
    if version == 1:
        graphs = []
        n = len(nx.graph_atlas_g())
        for i in range(n):
            graphs.append([nx.graph_atlas(i)])
        return graphs
    elif version == 2:
        graphs = nx.graph_atlas_g()
        graphs = [[g] for g in graphs]
        return graphs[1:]
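A minimal usage sketch for the function above, assuming NetworkX is importable as nx; the Graph Atlas names 1253 graphs and G0 is the empty graph:

import networkx as nx

graphs = iso_graphs()                   # default version=2 skips the empty graph G0
print(len(graphs))                      # expected: 1252 single-element lists
print(graphs[0][0].number_of_nodes())   # G1 is the single-node graph -> 1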
Example #2
def atlas6():
    """ Return the atlas of all connected graphs of 6 nodes or less.
        Attempt to check for isomorphisms and remove.
    """

    Atlas=nx.graph_atlas_g()[0:208] # 208
    # remove isolated nodes, only connected graphs are left
    U=nx.Graph() # graph for union of all graphs in atlas
    for G in Atlas: 
        zerodegree=[n for n in G if G.degree(n)==0]
        for n in zerodegree:
            G.remove_node(n)
        U=nx.disjoint_union(U,G)

    # list of graphs of all connected components        
    C=nx.connected_component_subgraphs(U)        
    
    UU=nx.Graph()        
    # do quick isomorphic-like check, not a true isomorphism checker     
    nlist=[] # list of nonisomorphic graphs
    for G in C:
        # check against all nonisomorphic graphs so far
        if not iso(G,nlist):
            nlist.append(G)
            UU=nx.disjoint_union(UU,G) # union the nonisomorphic graphs  
    return UU            
Example #3
def atlas6():
    """ Return the atlas of all connected graphs of 6 nodes or less.
        Attempt to check for isomorphisms and remove.
    """

    Atlas = nx.graph_atlas_g()[0:208]  # 208
    # remove isolated nodes, only connected graphs are left
    U = nx.Graph()  # graph for union of all graphs in atlas
    for G in Atlas:
        zerodegree = [n for n in G if G.degree(n) == 0]
        for n in zerodegree:
            G.remove_node(n)
        U = nx.disjoint_union(U, G)

    # list of graphs of all connected components
    C = nx.connected_component_subgraphs(U)

    UU = nx.Graph()
    # do quick isomorphic-like check, not a true isomorphism checker
    nlist = []  # list of nonisomorphic graphs
    for G in C:
        # check against all nonisomorphic graphs so far
        if not iso(G, nlist):
            nlist.append(G)
            UU = nx.disjoint_union(UU, G)  # union the nonisomorphic graphs
    return UU
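A minimal usage sketch for atlas6(); it assumes the iso helper referenced inside the function is defined in the same module and that the installed NetworkX still provides connected_component_subgraphs (removed in NetworkX 2.4), so treat it as illustrative rather than drop-in:

import matplotlib.pyplot as plt
import networkx as nx

UU = atlas6()   # disjoint union of the (roughly deduplicated) connected graphs
print(UU.number_of_nodes(), UU.number_of_edges())
nx.draw(UU, node_size=20, with_labels=False)
plt.show()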
Example #4
def count_exact(queries, targets, args):
    print("WARNING: orca only works for node anchored")
    # TODO: non node anchored
    n_matches_baseline = np.zeros(73)
    for target in targets:
        counts = np.array(orca.orbit_counts("node", 5, target))
        if args.count_method == "bin":
            counts = np.sign(counts)
        counts = np.sum(counts, axis=0)
        n_matches_baseline += counts
    # don't include size < 5
    n_matches_baseline = list(n_matches_baseline)[15:]
    counts5 = []
    num5 = 10  #len([q for q in queries if len(q) == 5])
    for x in list(sorted(n_matches_baseline, reverse=True))[:num5]:
        print(x)
        counts5.append(x)
    print("Average for size 5:", np.mean(np.log10(counts5)))

    atlas = [
        g for g in nx.graph_atlas_g()[1:] if nx.is_connected(g) and len(g) == 6
    ]
    queries = []
    for g in atlas:
        for v in g.nodes:
            g = g.copy()
            nx.set_node_attributes(g, 0, name="anchor")
            g.nodes[v]["anchor"] = 1
            is_dup = False
            for g2 in queries:
                if nx.is_isomorphic(
                        g,
                        g2,
                        node_match=(lambda a, b: a["anchor"] == b["anchor"])
                        if args.node_anchored else None):
                    is_dup = True
                    break
            if not is_dup:
                queries.append(g)
    print(len(queries))
    n_matches_baseline = count_graphlets(queries,
                                         targets,
                                         n_workers=args.n_workers,
                                         method=args.count_method,
                                         node_anchored=args.node_anchored,
                                         min_count=10000)
    counts6 = []
    num6 = 20  #len([q for q in queries if len(q) == 6])
    for x in list(sorted(n_matches_baseline, reverse=True))[:num6]:
        print(x)
        counts6.append(x)
    print("Average for size 6:", np.mean(np.log10(counts6)))
    return counts5 + counts6
Example #5
def test_graph_atlas():
    #Atlas = nx.graph_atlas_g()[0:208] # 208, 6 nodes or less
    Atlas = nx.graph_atlas_g()[0:100]
    alphabet = list(range(26))
    for graph in Atlas:
        nlist = graph.nodes()
        labels = alphabet[:len(nlist)]
        for s in range(10):
            random.shuffle(labels)
            d = dict(zip(nlist,labels))
            relabel = nx.relabel_nodes(graph, d)
            gm = vf2.GraphMatcher(graph, relabel)
            assert_true(gm.is_isomorphic())
Example #6
def atlas6():
    """Return the atlas of all connected graphs with at most 6 nodes"""

    Atlas = nx.graph_atlas_g()[3:209]  # 0, 1, 2 => no edges. 208 is last 6 node graph
    U = nx.Graph()  # graph for union of all graphs in atlas
    for G in Atlas:
        # check if connected
        if nx.number_connected_components(G) == 1:
            # check if isomorphic to a previous graph
            if not GraphMatcher(U, G).subgraph_is_isomorphic():
                U = nx.disjoint_union(U, G)
    return U
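A usage sketch for this variant, assuming GraphMatcher is imported from networkx.algorithms.isomorphism as in the original module:

import networkx as nx
from networkx.algorithms.isomorphism import GraphMatcher  # used inside atlas6()

U = atlas6()
print(nx.number_connected_components(U))  # one component per graph that was kept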
Example #7
def test_graph_atlas():
    #Atlas = nx.graph_atlas_g()[0:208] # 208, 6 nodes or less
    Atlas = nx.graph_atlas_g()[0:100]
    alphabet = list(range(26))
    for graph in Atlas:
        nlist = graph.nodes()
        labels = alphabet[:len(nlist)]
        for s in range(10):
            random.shuffle(labels)
            d = dict(zip(nlist, labels))
            relabel = nx.relabel_nodes(graph, d)
            gm = iso.GraphMatcher(graph, relabel)
            assert_true(gm.is_isomorphic())
Example #8
def load_dataset(name):
    """ Load real-world datasets, available in PyTorch Geometric.

    Used as a helper for DiskDataSource.
    """
    task = "graph"
    if name == "enzymes":
        dataset = TUDataset(root="/tmp/ENZYMES", name="ENZYMES")
    elif name == "proteins":
        dataset = TUDataset(root="/tmp/PROTEINS", name="PROTEINS")
    elif name == "cox2":
        dataset = TUDataset(root="/tmp/cox2", name="COX2")
    elif name == "aids":
        dataset = TUDataset(root="/tmp/AIDS", name="AIDS")
    elif name == "reddit-binary":
        dataset = TUDataset(root="/tmp/REDDIT-BINARY", name="REDDIT-BINARY")
    elif name == "imdb-binary":
        dataset = TUDataset(root="/tmp/IMDB-BINARY", name="IMDB-BINARY")
    elif name == "firstmm_db":
        dataset = TUDataset(root="/tmp/FIRSTMM_DB", name="FIRSTMM_DB")
    elif name == "dblp":
        dataset = TUDataset(root="/tmp/DBLP_v1", name="DBLP_v1")
    elif name == "ppi":
        dataset = PPI(root="/tmp/PPI")
    elif name == "qm9":
        dataset = QM9(root="/tmp/QM9")
    elif name == "atlas":
        dataset = [g for g in nx.graph_atlas_g()[1:] if nx.is_connected(g)]
    if task == "graph":
        train_len = int(0.8 * len(dataset))
        train, test = [], []
        dataset = list(dataset)
        random.shuffle(dataset)
        has_name = hasattr(dataset[0], "name")
        for i, graph in tqdm(enumerate(dataset)):
            if not type(graph) == nx.Graph:
                if has_name: del graph.name
                graph = pyg_utils.to_networkx(graph).to_undirected()
            if i < train_len:
                train.append(graph)
            else:
                test.append(graph)
    return train, test, task
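A minimal call sketch for the helper above. The "atlas" branch needs only NetworkX, while the other dataset names require the PyTorch Geometric classes imported in the original module (TUDataset, PPI, QM9); random, tqdm, and pyg_utils are likewise assumed to be in scope:

train, test, task = load_dataset("atlas")  # connected graphs from the Graph Atlas
print(task, len(train), len(test))         # "graph" task with an 80/20 split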
Example #9
def load_dataset(name):
    """ Load real-world datasets, available in PyTorch Geometric.

    Used as a helper for DiskDataSource.
    """
    task = "graph"
    if name == "enzymes":
        dataset = TUDataset(root="/tmp/ENZYMES", name="ENZYMES")
    elif name == "proteins":
        dataset = TUDataset(root="/tmp/PROTEINS", name="PROTEINS")
    elif name == "cox2":
        dataset = TUDataset(root="/tmp/cox2", name="COX2")
    elif name == "aids":
        dataset = TUDataset(root="/tmp/AIDS", name="AIDS")
    elif name == "reddit-binary":
        dataset = TUDataset(root="/tmp/REDDIT-BINARY", name="REDDIT-BINARY")
    elif name == "imdb-binary":
        dataset = TUDataset(root="/tmp/IMDB-BINARY", name="IMDB-BINARY")
    elif name == "firstmm_db":
        dataset = TUDataset(root="/tmp/FIRSTMM_DB", name="FIRSTMM_DB")
    elif name == "dblp":
        dataset = TUDataset(root="/tmp/DBLP_v1", name="DBLP_v1")
    elif name == "ppi":
        dataset = PPI(root="/tmp/PPI")
    elif name == "qm9":
        dataset = QM9(root="/tmp/QM9")
    elif name == "atlas":
        dataset = [g for g in nx.graph_atlas_g()[1:] if nx.is_connected(g)]
    elif name == 'aifb':
        dataset = Entities(root="/tmp/aifb", name='AIFB')  # 90 edge types
    elif name == 'wn18':
        dataset = WordNet18(root="/tmp/wn18")
    elif name == 'fb15k237':
        dataset = [None]
    if task == "graph":
        train_len = int(0.8 * len(dataset))
        train, test = [], []
        if name not in ['aifb', 'wn18', 'fb15k237']:
            dataset = list(dataset)
            random.shuffle(dataset)
            has_name = hasattr(dataset[0], "name")
        else:
            has_name = True
        for i, graph in tqdm(enumerate(dataset)):
            if not type(graph) == nx.Graph:
                try:
                    if has_name: del graph.name
                except:
                    pass
                if name == 'aifb':
                    graph = pyg_utils.to_networkx(graph,
                                                  edge_attrs=['edge_type'])
                elif name == 'wn18':
                    graph = pyg_utils.to_networkx(graph,
                                                  edge_attrs=['edge_type'])
                elif name == 'fb15k237':
                    data = FB15k_237()
                    (graph, _, _, _) = data.load()
                    graph = graph.to_networkx()
                    edge_type_dict = []
                    for j in graph.edges:
                        edge_type_dict.append(graph.edges[j]['label'])
                    edge_type_dict = {
                        i: ind
                        for ind, i in enumerate(sorted(set(edge_type_dict)))
                    }

                    for j in graph.edges:
                        graph.edges[j]['edge_type'] = edge_type_dict[
                            graph.edges[j]['label']]
                        del graph.edges[j]['label']
                        del graph.edges[j]['weight']
                else:
                    graph = pyg_utils.to_networkx(graph).to_undirected()
            if name == 'aifb':
                train.append(graph)
                test.append(deepcopy(graph))
            elif name == 'wn18':
                train.append(graph)
                test.append(deepcopy(graph))
            elif name == 'fb15k237':
                train.append(graph)
                test.append(deepcopy(graph))
            else:
                if i < train_len:
                    train.append(graph)
                else:
                    test.append(graph)

    return train, test, task
Example #10
def generate_small_graphs(max_size):
    return [
        g for g in nx.graph_atlas_g()
        if len(g) > 1 and len(g) <= max_size and nx.is_connected(g)
    ]
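A quick sanity check for the generator above, assuming NetworkX is importable as nx; the expected count follows from the number of connected graphs on a fixed node count (1 on two nodes, 2 on three, 6 on four):

import networkx as nx

small = generate_small_graphs(4)
print(len(small))  # expected: 1 + 2 + 6 = 9 connected graphs on 2-4 nodes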
Example #11
def setup_class(cls):
    cls.GAG = graph_atlas_g()
Example #12
def setUp(self):
    self.GAG=nx.graph_atlas_g()
Example #13
def setUp(self):
    self.GAG = nx.graph_atlas_g()
Example #14
plt.show()

#%%
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
price = pd.Series(np.random.randn(150).cumsum(),
                  index=pd.date_range('2000-1-1', periods=150, freq='B'))

ma = price.rolling(20).mean()

mstd = price.rolling(20).std()

plt.figure()

plt.plot(price.index, price, 'k')

plt.plot(ma.index, ma, 'b')

plt.fill_between(mstd.index,
                 ma - 2 * mstd,
                 ma + 2 * mstd,
                 color='b',
                 alpha=0.2)

plt.show()
#%%
import networkx as nx
GAG = nx.graph_atlas_g()
nx.draw(GAG[123])
plt.show()
Example #15
"""
Atlas
=====

An example showing how to write the first 20 graphs from the graph atlas as
Graphviz dot files Gn.dot, where n = 0, ..., 19.

TODO: does nx_agraph.draw support multiple graphs in one png?
"""

import networkx as nx

atlas = nx.graph_atlas_g()[0:20]

for G in atlas:
    print(G)
    A = nx.nx_agraph.to_agraph(G)
    A.graph_attr["label"] = G.name
    # set default node attributes
    A.node_attr["color"] = "red"
    A.node_attr["style"] = "filled"
    A.node_attr["shape"] = "circle"
    A.write(G.name + ".dot")

# Draw the 20th graph from the atlas to png
A.draw("A20.png", prog="neato")
Example #16
def test_atlas():
    for graph in nx.graph_atlas_g():
        deg = list(graph.degree().values())
        assert_true( nx.is_valid_degree_sequence(deg, method='eg') )
        assert_true( nx.is_valid_degree_sequence(deg, method='hh') )        
Example #18
import networkx as nx
import pandas as pd
from classical import brute_force
# NOTE: this script also relies on Qiskit objects not imported in this excerpt
# (an `optimizers` module providing SPSA, and `Aer` for the simulator backend).

# Defining classical optimizer necessary for QAOA
optimizer = optimizers.SPSA()

# Backend
backend = Aer.get_backend('qasm_simulator')  #simulator

# Producing the data
p_min, p_max = 1, 3

# Putting the whole atlas in one data frame
output = pd.DataFrame()

for i in range(3, len(nx.graph_atlas_g())):
    G = nx.graph_atlas(i)
    n = len(G.nodes)
    m = len(G.edges)
    d = m / (n * (n - 1) / 2)  # density of graph - number of edges / number of possible edges

    optimum = brute_force(G)
    print("\nGraph " + str(i) + ": n = " + str(n), ", m = " + str(m),
          ", d = " + str(d), "the optimum cut is " + str(optimum) + "\n")

    if m > 0:  # Method does not work if there are no edges
        for p in range(p_min, p_max + 1):
            # specifications
            specs = {
                'graph_atlas index': i,