def simulate_affiliation_dpe():
    """Embed affiliation-model graphs and report k-means clustering errors.

    For each graph size in ``nrange``, sample an affiliation model with ``k``
    blocks; for each embedding dimension ``d`` cluster every embedding method
    in ``embed`` with k-means and print the number of label differences
    (up to block permutation) against the true 'block' labels.  After each
    graph, display its adjacency matrix.

    NOTE(review): relies on module-level rg, Embed, nx, plot and
    num_diffs_w_perms_graph — confirm they are imported in this module.
    """
    nrange = [400]  # 50 * 2**np.arange(3)
    drange = np.arange(1, 5)
    embed = [Embed.dot_product_embed,
             Embed.dot_product_embed_unscaled,
             Embed.normalized_laplacian_embed,
             Embed.normalized_laplacian_embed_scaled]
    k = 2
    p = .15
    q = .1
    for n in nrange:
        G = rg.affiliation_model(n, k, p, q)
        for d in drange:
            # end=' ' keeps all results for one (n, d) row on a single line
            # (Python 3 replacement for the old trailing-comma print).
            print(n * k, d, end=' ')
            for e in embed:
                Embed.cluster_vertices_kmeans(G, e, d, k, 'kmeans')
                print(num_diffs_w_perms_graph(G, 'block', 'kmeans'), end=' ')
            print()
        plot.matshow(nx.adj_matrix(G))
        plot.show()
def embedding_vs_dimension_performance():
    """Monte-Carlo comparison of embedding methods across dimensions.

    Runs ``nmc`` replicates.  Each replicate samples an affiliation-model
    graph, then for every (dimension, embedding) parameter combination
    clusters the embedded vertices with k-means and records two scores
    against the true block labels: the number of label differences (up to
    permutation) and the adjusted Rand index.

    Returns
    -------
    list of dict
        One dict per parameter combination, each carrying its per-replicate
        'num_diff' and 'rand_idx' arrays of length ``nmc``.
    """
    n = 50
    drange = np.arange(1, 5)
    embed = [Embed.dot_product_embed,
             Embed.dot_product_embed_unscaled,
             Embed.normalized_laplacian_embed,
             Embed.normalized_laplacian_embed_scaled]
    nmc = 10  # number of Monte-Carlo replicates
    k = 2
    p = .5
    q = .1
    all_params = list(IterGrid({'d': drange, 'embed': embed}))
    # Plain loop for the side effect (a list comprehension built a throwaway
    # list of Nones here).
    for param in all_params:
        param.update({'num_diff': np.zeros(nmc), 'rand_idx': np.zeros(nmc)})
    for mc in np.arange(nmc):
        print(mc)
        G = rg.affiliation_model(n, k, p, q)
        # list() so the labels are a real sequence on Python 3, where
        # dict.values() is a lazy view.
        truth = list(nx.get_node_attributes(G, 'block').values())
        for param in all_params:
            pred = Embed.cluster_vertices_kmeans(G, param['embed'], param['d'], 2)
            param['num_diff'][mc] = num_diff_w_perms(truth, pred)
            param['rand_idx'][mc] = metrics.adjusted_rand_score(truth, pred)
    return all_params
def run_test(size):
    """Binary-search for the edge probability p at which a random graph on
    ``size`` vertices is connected about half the time.

    Each round samples ``tries`` graphs at the current p, prints the observed
    connected fraction, then halves the step and moves p toward the 50%%
    threshold.

    NOTE(review): depends on module-level ``iterations`` (search depth),
    ``tries`` (samples per probability), ``Vertex`` and ``RandomGraph`` —
    confirm all are defined elsewhere in this module.
    """
    vertices = [Vertex(n) for n in range(size)]
    p = 0.5
    for i in range(1, iterations):
        connected_count = 0
        for _ in range(tries):
            g = RandomGraph(vertices)
            g.add_random_edges(p)
            if g.is_connected():
                connected_count += 1
        # float() keeps true division on both Python 2 and Python 3.
        percent = connected_count / float(tries)
        print("p: %f, average connected = %f" % (p, percent))
        # Halve the step each round: classic bisection toward percent == 0.5.
        offset = 1.0 / 2 ** (i + 1)
        if percent > 0.5:
            p -= offset
        else:
            p += offset
def test_affiliation(self):
    """Affiliation model: node count is n*k, edge count bounded by (n*k)**2."""
    per_block, num_blocks = 10, 3
    p_within, p_between = .15, .1
    graph = rg.affiliation_model(per_block, num_blocks, p_within, p_between)
    total = per_block * num_blocks
    assert_equal(graph.number_of_nodes(), total)
    assert_true(graph.number_of_edges() <= total ** 2)
def test_affiliation(self):
    """A sampled affiliation-model graph has the expected size bounds."""
    params = (10, 3, .15, .1)  # n, k, p, q
    G = rg.affiliation_model(*params)
    expected_nodes = params[0] * params[1]
    assert_equal(G.number_of_nodes(), expected_nodes)
    assert_true(G.number_of_edges() <= expected_nodes ** 2)
def connect_vs_p(n, trials):
    """Estimate P(connected) for a random graph on ``n`` vertices as the edge
    probability sweeps 0.00, 0.01, ..., 1.00, sampling ``trials`` graphs at
    each probability.

    Returns
    -------
    (xs, ys) : the probability grid and the observed connected fraction.
    """
    connected = []
    # 100.0 forces true division: under Python 2, i / 100 was integer
    # division, so every probability except the last collapsed to 0.
    xs = [i / 100.0 for i in range(101)]
    vs = [Vertex('v' + str(i)) for i in range(n)]
    for prob in xs:
        print(prob)
        subconnected = []
        for _ in range(trials):
            g = RandomGraph(vs)
            g.add_random_edges(prob)
            subconnected.append(1 if g.is_connected() else 0)
        connected.append(subconnected)
    # float() again: sum(sub) and trials are both ints, which truncated to
    # 0 or 1 under Python 2 division.
    ys = [sum(sub) / float(trials) for sub in connected]
    return xs, ys
def connect_vs_p(n, trials):
    """Sweep the edge probability from 0 to 1 in steps of 0.01 and, for each
    value, estimate the fraction of ``trials`` sampled n-vertex random graphs
    that come out connected.

    Returns the probability grid ``xs`` and the connected fractions ``ys``.
    """
    vs = [Vertex('v' + str(i)) for i in range(n)]
    # Divide by a float: on Python 2, i/100 was integer division and yielded
    # 0 for every i < 100 — the whole sweep tested p = 0 a hundred times.
    xs = [i / 100.0 for i in range(101)]
    connected = []
    for p in xs:
        print(p)
        outcomes = []
        for _ in range(trials):
            g = RandomGraph(vs)
            g.add_random_edges(p)
            outcomes.append(1 if g.is_connected() else 0)
        connected.append(outcomes)
    # float(trials) avoids the same truncation when averaging.
    ys = [sum(sub) / float(trials) for sub in connected]
    return xs, ys
def test_SBM(self):
    """SBM sampling: node count equals sum(nvec), edge count is bounded,
    and the ``seed`` keyword is accepted."""
    seed = np.random.randint(45)
    # stochastic block model parameters
    k = 3        # number of blocks
    n_min = 2    # minimum number of vertices per block
    # Draw n_max >= n_min: the old np.random.randint(15) could return 0 or 1,
    # making the per-block size draw below raise.
    n_max = np.random.randint(n_min, 15)  # maximum vertices per block
    # np.random.random_integers was deprecated and removed from NumPy;
    # randint with an exclusive upper bound is the supported replacement.
    nvec = np.random.randint(n_min, n_max + 1, k)  # vertices per block
    n = nvec.sum()  # total number of vertices
    B = np.random.uniform(
        0, 1, (k, k))  # probabilities of connections between all blocks
    G = rg.SBM(nvec, B)
    assert_equal(G.number_of_nodes(), n)
    assert_true(G.number_of_edges() <= n**2)
    G = rg.SBM(nvec, B, seed=seed)
    assert_equal(G.number_of_nodes(), n)
    assert_true(G.number_of_edges() <= n**2)
def _do_mc(self):
    """Run one Monte-Carlo replicate.

    Samples a graph, and for every (epsilon, dimension) pair embeds an
    errorful subgraph and runs the VN, mclust and k-means performance
    metrics on its scaled embedding, appending each result object to the
    corresponding results dictionary keyed by (eps, d).
    """
    graph = self.rgg.generate_graph()
    for eps in self.epsRange:
        subgraph = rg.get_errorful_subgraph(graph, int(self.errFunc(eps)), eps)
        self.embed.embed(subgraph)
        for dim in self.dRange:
            coords = self.embed.get_scaled(dim)
            key = (eps, dim)

            vn_result = vn.vn_metrics(coords, self.observed, self.notObserved)
            vn_result.run()
            self.vnResults[key].append(vn_result)

            mclust_result = vn.mclust_performance(coords, self.block)
            mclust_result.run()
            self.mclustResults[key].append(mclust_result)

            kmeans_result = vn.kmeans_performance(coords, self.block, self.kRange)
            kmeans_result.run()
            self.kmeansResults[key].append(kmeans_result)
randomDiGraph_HierholzerAlgo_File, randomGraphWithBarabasiAlbertModel_HierholzerAlgo_File, \ randomGraphWithWattsStrogatzModel_HierholzerAlgo_File, randomGraphWithBarabasiAlbertModel_FleuryAlgo_File, \ randomGraphWithWattsStrogatzModel_FleuryAlgo_File = openFile() randomDiGraph_Param, randomGraphWithBarabasiAlbertModel_Param, randomGraphWithWattsStrogatzModel_Param = setParam( ) ############################################################### for param in randomDiGraph_Param: allTimes = 0 edges = 0 for _ in range(numberOfSamples): filename = "stat/graph/diGraph_node{0}_sample{1}.txt".format( param[0], _) graph = RandomGraph.getRandomDiGraph(param[0], param[1]) networkx.write_adjlist(graph, filename) edges += len(graph.edges) start = time.time() HierholzerAlgorithm.HierholzerAlgorithm(graph, True, 0) end = time.time() allTimes += end - start randomDiGraph_HierholzerAlgo_File.write("{0:>7} {1:>20} {2:>50}\n".format( param[0], edges / numberOfSamples, allTimes / numberOfSamples)) ############################################################### for param in randomGraphWithBarabasiAlbertModel_Param:
def test_DPE(self):
    """Sample a small Erdos-Renyi graph and hand it back.

    NOTE(review): this test makes no assertions — it only checks that
    rg.ER(5, 0.1) runs without raising.
    """
    graph = rg.ER(5, 0.1)
    return graph
import igraph, math, random
from Units import *
from graph_helpers import *
from chisel_module_helpers import *
from generate_chisel import *
from RandomGraph import *


if __name__ == "__main__":
    # Build a 30-node random graph (L=0) and write it out.
    random_graph = RandomGraph(L=0)
    random_graph.build_graph(N=30, IN_W=100)
    write_random_graph(random_graph)

    # Visualize: annotate the graph and render it to a PDF.
    random_graph.add_visualization_features()
    random_graph.save_graph_pdf()

    # NOTE(review): earlier hand-built 3-node prototype, kept for reference.
    # N = 3
    # g = Graph(n=N, directed=True)
    # # generate units
    # g.vs[0]["unit"] = Unit(type=UnitType.COMPUTE)
    # g.vs[1]["unit"] = Unit(type=UnitType.MEMORY)
    # g.vs[2]["unit"] = Unit(type=UnitType.NETWORK)
    # # connect units based on io — algorithm for this...?
    # for i in range(N - 1):
    #     g.add_edges([(i, i + 1)])
    #     # r/w memory
    #     if g.vs[i]["unit"].type == UnitType.MEMORY:
    #         g.add_edges([(i, i - 1)])
def test_ErdosRenyi(self):
    """ER graph on n vertices: exactly n nodes, at most C(n, 2) edges."""
    # scipy.misc.comb was removed (SciPy >= 1.0); scipy.special.comb is the
    # supported equivalent, with the same exact= keyword.
    from scipy.special import comb
    n = 10
    G = rg.ER(n, 0.5)
    assert_equal(G.number_of_nodes(), n)
    assert_true(G.number_of_edges() <= comb(n, 2, exact=True))