def test_compute_single_query(self):
    # A frontend query over a valid graph yields a non-empty result.
    fq = smp.FrontendQuery(self.G)
    self.assertTrue(
        len(smp.FeatureComputer.compute_single_query(fq, smp.SimpleGraphDegree())) > 0)

    # A frontend query with no graph returns None.
    fq_none = smp.FrontendQuery(None)
    self.assertEqual(
        smp.FeatureComputer.compute_single_query(fq_none, smp.SimpleGraphDegree()), None)

    # A backend query needs both the original graph and the sample.
    bq = smp.BackendQuery(self.G, self.S)
    self.assertTrue(
        len(smp.FeatureComputer.compute_single_query(bq, smp.SimpleGraphDegree())) > 0)

    # If either the graph or the sample is missing, the result is None.
    bq_none = smp.BackendQuery(None, self.S)
    self.assertEqual(
        smp.FeatureComputer.compute_single_query(bq_none, smp.SimpleGraphDegree()), None)

    bq_none = smp.BackendQuery(self.G, None)
    self.assertEqual(
        smp.FeatureComputer.compute_single_query(bq_none, smp.SimpleGraphDegree()), None)
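# A minimal usage sketch of the API exercised by the test above, outside the
# unittest harness. It only reuses names that already appear in this code base
# (FrontendQuery, BackendQuery, FeatureComputer, SimpleGraphDegree,
# induced_random_edge_sampler); treat it as illustrative, not canonical.
def _example_single_query():
    import networkx as nx
    import sampling as smp
    from sampling.sampling_algorithms import induced_random_edge_sampler

    G = nx.barabasi_albert_graph(100, 4)
    S = induced_random_edge_sampler(
        G, 20, stopping_condition='UNIQUE_NODES', with_replacement=True)

    # Degree feature over the original graph alone (frontend query) ...
    front = smp.FeatureComputer.compute_single_query(
        smp.FrontendQuery(G), smp.SimpleGraphDegree())
    # ... and over the (original, sample) pair (backend query).
    back = smp.FeatureComputer.compute_single_query(
        smp.BackendQuery(G, S), smp.SimpleGraphDegree())
    return front, back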
# ba_g2 = dict(Counter(ba_g.values()))
# ba_gx, ba_gy = log_binning(ba_g2, 50)  # degree distribution
# plt.figure(1)
# plt.xscale('log')
# plt.yscale('log')
# # plt.scatter(ba_gx, ba_gy, c='r', marker='s', s=50)
# plt.scatter(ba_g2.keys(), ba_g2.values(), c='b', marker='x')
# plt.xlabel('Connections (normalized)')
# plt.ylabel('Frequency')
# # plt.xlim((1e-4, 1e-1))
# # plt.ylim((.9, 1e4))
# plt.show()

original_degree = smp.SimpleGraphDegree()
original_degree_dis = original_degree.compute_frontend_distribution(original)
print('original_degree_dis', original_degree_dis)
print('\n')

original_clusteringcoefficient = smp.SimpleGraphClusteringCoefficient()
original_clusteringcoefficient_dis = \
    original_clusteringcoefficient.compute_frontend_distribution(original)
print('original_clusteringcoefficient_dis', original_clusteringcoefficient_dis)
print('\n')

original_pathlength = smp.SimpleGraphPathLength()
original_pathlength_dis = original_pathlength.compute_frontend_distribution(original)
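# A small sketch, in the spirit of the commented-out block above, of plotting
# one of these distributions on log-log axes with plain matplotlib. It assumes
# compute_frontend_distribution returns a mapping from feature value to
# frequency; if it returns another structure, adapt the unpacking accordingly.
import matplotlib.pyplot as plt

def plot_distribution_loglog(dist, xlabel='Degree', ylabel='Frequency'):
    """Scatter a {value: frequency} mapping on log-log axes."""
    xs, ys = zip(*sorted(dist.items()))
    plt.figure()
    plt.xscale('log')
    plt.yscale('log')
    plt.scatter(xs, ys, c='b', marker='x')
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    plt.show()

# Example (hypothetical): plot_distribution_loglog(original_degree_dis)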
    return feature_dic


if __name__ == "__main__":
    import networkx as nx
    import sampling as smp
    from sampling.sampling_algorithms import *

    # Build a small test graph and persist it for reproducibility.
    G = nx.barabasi_albert_graph(10, 4, 3)
    nx.write_edgelist(G, 'G.edgelist')

    # Draw an induced random-edge sample and persist it as well.
    sample_graph = induced_random_edge_sampler(
        G, 5, stopping_condition='UNIQUE_NODES', with_replacement=True)
    print(sample_graph.nodes())
    print(sample_graph.edges())
    nx.write_edgelist(sample_graph, 'sample.edgelist', data=False)

    q_list = [BackendQuery(), FrontendQuery()]
    f_list = [smp.SimpleGraphDegree()]
    query_list = ['BackendQuery', 'FrontendQuery']
    feature_list = [
        'SimpleGraphDegree', 'SimpleGraphClusteringCoefficient',
        'SimpleGraphPathLength'
    ]
    compute_all_features(G, sample_graph, feature_list, query_list)

    # ex = QueryExecuter()
    # print(ex.compute_all_queries(G, sample_graph, f_list, q_list))
    # print(compute_all_backend_queries(G, sample_graph, f_list, q_list))
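# A library-only sketch of the kind of comparison the feature/query machinery
# above automates: contrasting the degree histograms of the original graph and
# the sampled subgraph with plain networkx. It is illustrative and not part of
# compute_all_features.
import networkx as nx

def degree_histograms(original, sample):
    """Return (original_hist, sample_hist); index = degree, value = node count."""
    return nx.degree_histogram(original), nx.degree_histogram(sample)

# Example (hypothetical):
#   orig_hist, samp_hist = degree_histograms(G, sample_graph)
#   print(orig_hist)
#   print(samp_hist)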
            current_subgraph = new_subgraph
            s.remove(w)
        else:
            s.remove(v)

        # Keep the lowest-divergence subgraph seen so far.
        if __divergence(G, current_subgraph, divergence, feature) < __divergence(
                G, best_subgraph, divergence, feature):
            best_subgraph = current_subgraph

        div_scores[i] = __divergence(G, best_subgraph, divergence, feature)
        # Geometric cooling of the annealing temperature.
        T = gamma * T

    nx.write_gml(best_subgraph, "best_subgraph.gml")
    return best_subgraph, div_scores


if __name__ == '__main__':
    import analytics
    import pylab
    from math import log10
    from pandas import Series

    G = nx.barabasi_albert_graph(1000, 10, 1)
    p = 10 * G.number_of_edges() * log10(G.number_of_nodes()) / G.number_of_nodes()

    best, div = metropolis_subgraph_sampler(
        G, 100, analytics.DivergenceMetrics.JensenShannonDivergence,
        smp.SimpleGraphDegree(), 1000, p, 10, 2)
    # print('div:', div)

    # Plot the divergence trace over the annealing iterations.
    Series(div).plot()
    pylab.gca().set_ylabel('JS-Divergence')
    pylab.gca().set_xlabel('# of iterations')
    pylab.show()
    # pylab.plot(div)
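# For reference, a generic sketch of the Metropolis acceptance rule that a
# sampler with temperature T and geometric cooling (T = gamma * T) typically
# applies. The exact rule used inside metropolis_subgraph_sampler is not shown
# in this fragment, so treat this as a textbook illustration, not its source.
import math
import random

def metropolis_accept(delta_divergence, T):
    """Decide whether to accept a candidate move at temperature T.

    Improvements (delta <= 0) are always accepted; worse moves are accepted
    with probability exp(-delta / T), which shrinks as T cools.
    """
    if delta_divergence <= 0:
        return True
    return random.random() < math.exp(-delta_divergence / T)

# Example (hypothetical): metropolis_accept(0.05, T=0.5) accepts with
# probability exp(-0.1) ~ 0.90.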