def test_shortest_path():
    """Picklability test for the Shortest Path kernel."""
    # Labelled variant: discrete node labels ('nl').
    data, _ = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                               r_connectivity=(0.4, 0.8),
                               r_weight_edges=(1, 1), n_graphs_test=40,
                               random_state=rs, features=('nl', 3))
    kernel = ShortestPath(verbose=verbose, normalize=normalize)
    kernel.fit(data)
    assert is_picklable(kernel)

    # Attributed variant: continuous node attributes ('na').
    data, _ = generate_dataset(n_graphs=50, r_vertices=(5, 10),
                               r_connectivity=(0.4, 0.8),
                               r_weight_edges=(1, 1), n_graphs_test=20,
                               random_state=rs, features=('na', 5))
    kernel = ShortestPathAttr(verbose=verbose, normalize=normalize)
    kernel.fit(data)
    assert is_picklable(kernel)
def test_subgraph_matching():
    """Picklability test for the Subgraph Matching kernel."""
    # Each case: (n_graphs, r_vertices, n_graphs_test, features, extra kwargs),
    # covering every label/attribute combination on nodes and edges.
    cases = [
        (100, (10, 20), 40, ('nl', 3, 'el', 4), {}),
        (50, (5, 10), 20, ('nl', 3, 'ea', 5), {'ke': np.dot}),
        (50, (5, 10), 20, ('na', 4, 'el', 3), {'kv': np.dot}),
        (50, (5, 10), 20, ('na', 4, 'ea', 6), {'ke': np.dot, 'kv': np.dot}),
    ]
    for n_graphs, r_vertices, n_test, feats, extra in cases:
        data, _ = generate_dataset(n_graphs=n_graphs, r_vertices=r_vertices,
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(1, 1),
                                   n_graphs_test=n_test, random_state=rs,
                                   features=feats)
        kernel = SubgraphMatching(verbose=verbose, normalize=normalize,
                                  **extra)
        kernel.fit(data)
        assert is_picklable(kernel)
def test_shortest_path():
    """Random input test for the Shortest Path kernel."""
    # No try/except: letting an exception propagate fails the test with the
    # full traceback, which `assert False, exception` used to discard.
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(1, 1), n_graphs_test=40,
                                   random_state=rs, features=('nl', 3))
    sp_kernel = ShortestPath(verbose=verbose, normalize=normalize)
    sp_kernel.fit_transform(train)
    sp_kernel.transform(test)

    train, test = generate_dataset(n_graphs=50, r_vertices=(5, 10),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(1, 1), n_graphs_test=20,
                                   random_state=rs, features=('na', 5))
    sp_kernel = ShortestPathAttr(verbose=verbose, normalize=normalize)
    sp_kernel.fit_transform(train)
    sp_kernel.transform(test)
def test_propagation():
    """Picklability test for the Propagation kernel."""
    # Run both the labelled and the attributed variant through the same check.
    for feats, kernel_cls in ((('nl', 4), Propagation),
                              (('na', 5), PropagationAttr)):
        data, _ = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(float("1e-5"), 10),
                                   n_graphs_test=40, random_state=rs,
                                   features=feats)
        kernel = kernel_cls(verbose=verbose, normalize=normalize)
        kernel.fit(data)
        assert is_picklable(kernel)
def test_multiscale_laplacian_fast_pd():
    """Random input test for the Fast Multiscale Laplacian kernel [n_jobs=-1/generic-wrapper]."""
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(1, 1), n_graphs_test=40,
                                   random_state=rs, features=('na', 5))
    gk = GraphKernel(kernel={"name": "ML", "which": "fast"},
                     verbose=verbose, normalize=normalize, n_jobs=-1)
    # No try/except: a raised exception fails the test with its traceback,
    # which `assert False, exception` used to hide.
    gk.fit_transform(train)
    gk.transform(test)
def test_graphlet_sampling():
    """Random input test for the Graphlet Sampling Kernel [+ generic-wrapper]."""
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(1, 1), n_graphs_test=40,
                                   random_state=rs, features=('nl', 3))
    gs_kernel = GraphletSampling(verbose=verbose, normalize=normalize,
                                 sampling=dict(n_samples=50))
    gk = GraphKernel(kernel={"name": "GR", "sampling": {"n_samples": 50}},
                     verbose=verbose, normalize=normalize)
    # No try/except: exceptions propagate with full tracebacks instead of
    # being collapsed into `assert False, exception`.
    gs_kernel.fit_transform(train)
    gs_kernel.transform(test)
    gk.fit_transform(train)
    gk.transform(test)
def test_random_walk_labels_pd():
    """Random input test for the Labelled Random Walk kernel [n_jobs=-1/generic-wrapper]."""
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(0.01, 12.0),
                                   n_graphs_test=40, random_state=rs,
                                   features=('nl', 3))
    # NOTE(review): the docstring advertises n_jobs=-1 but the constructor
    # does not pass it — confirm whether parallelism was dropped on purpose.
    gk = GraphKernel(kernel={"name": "RW", "with_labels": True},
                     verbose=verbose, normalize=normalize)
    # No try/except: exceptions propagate with full tracebacks instead of
    # being collapsed into `assert False, exception`.
    gk.fit_transform(train)
    gk.transform(test)
def test_core_framework():
    """Random input test for the Core kernel Framework [+ generic-wrapper]."""
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(1, 1), n_graphs_test=40,
                                   random_state=rs, features=('nl', 4))
    # NOTE(review): other tests in this file use the `base_graph_kernel`
    # keyword for CoreFramework/WeisfeilerLehman — confirm `base_kernel` is
    # still accepted by the installed GraKeL version.
    base_kernel = (WeisfeilerLehman, dict(base_kernel=VertexHistogram))
    core_framework = CoreFramework(verbose=verbose, normalize=normalize,
                                   base_kernel=base_kernel)
    gk = GraphKernel(kernel=["CORE", "WL"], verbose=verbose,
                     normalize=normalize)
    # No try/except: exceptions propagate with full tracebacks instead of
    # being collapsed into `assert False, exception`.
    core_framework.fit_transform(train)
    core_framework.transform(test)
    gk.fit_transform(train)
    gk.transform(test)
def test_graphlet_sampling():
    """Picklability test for the Graphlet Sampling Kernel [+ generic-wrapper]."""
    data, _ = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                               r_connectivity=(0.4, 0.8),
                               r_weight_edges=(1, 1), n_graphs_test=40,
                               random_state=rs, features=('nl', 3))
    # Direct kernel object and the equivalent generic-wrapper configuration.
    direct = GraphletSampling(verbose=verbose, normalize=normalize,
                              sampling=dict(n_samples=50))
    wrapped = GraphKernel(kernel={"name": "graphlet_sampling",
                                  "sampling": {"n_samples": 50}},
                          verbose=verbose, normalize=normalize)
    direct.fit(data)
    assert is_picklable(direct)
    wrapped.fit(data)
    assert is_picklable(wrapped)
def test_core_framework():
    """Picklability test for the Core kernel Framework [+ generic-wrapper]."""
    data, _ = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                               r_connectivity=(0.4, 0.8),
                               r_weight_edges=(1, 1), n_graphs_test=40,
                               random_state=rs, features=('nl', 4))
    # Direct object with an explicit WL-over-VertexHistogram base kernel.
    base = (WeisfeilerLehman, dict(base_graph_kernel=VertexHistogram))
    direct = CoreFramework(verbose=verbose, normalize=normalize,
                           base_graph_kernel=base)
    # Equivalent generic-wrapper configuration as a kernel chain.
    wrapped = GraphKernel(kernel=[{"name": "core_framework"},
                                  {"name": "weisfeiler_lehman"},
                                  {"name": "vertex_histogram"}],
                          verbose=verbose, normalize=normalize)
    direct.fit(data)
    assert is_picklable(direct)
    wrapped.fit(data)
    assert is_picklable(wrapped)
def test_pyramid_match_no_labels():
    """Random input test for the Pyramid Match kernel with no labels [+ generic-wrapper]."""
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(1, 1), n_graphs_test=40,
                                   random_state=rs, features=None)
    pm_kernel = PyramidMatch(verbose=verbose, normalize=normalize,
                             with_labels=False)
    gk = GraphKernel(kernel={"name": "PM", "with_labels": False},
                     verbose=verbose, normalize=normalize)
    # No try/except: exceptions propagate with full tracebacks instead of
    # being collapsed into `assert False, exception`.
    pm_kernel.fit_transform(train)
    pm_kernel.transform(test)
    gk.fit_transform(train)
    gk.transform(test)
def test_propagation_pd():
    """Random input test for the Propagation kernel [n_jobs=-1/generic-wrapper]."""
    # No try/except in this test: exceptions propagate with full tracebacks
    # instead of being collapsed into `assert False, exception`.
    # Labelled variant.
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(float("1e-5"), 10),
                                   n_graphs_test=40, random_state=rs,
                                   features=('nl', 4))
    gk = GraphKernel(kernel="PR", verbose=verbose, normalize=normalize,
                     n_jobs=-1)
    gk.fit_transform(train)
    gk.transform(test)

    # Attributed variant.
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(float("1e-5"), 10),
                                   n_graphs_test=40, random_state=rs,
                                   features=('na', 5))
    gk = GraphKernel(kernel={"name": "PR", "with_attributes": True},
                     verbose=verbose, normalize=normalize, n_jobs=-1)
    gk.fit_transform(train)
    gk.transform(test)
def test_shortest_path_pd():
    """Random input test for the Shortest Path kernel [n_jobs=-1 (for attributed)/decorator]."""
    # No try/except in this test: exceptions propagate with full tracebacks
    # instead of being collapsed into `assert False, exception`.
    # Labelled variant (serial).
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(1, 1), n_graphs_test=40,
                                   random_state=rs, features=('nl', 3))
    gk = GraphKernel(kernel="SP", verbose=verbose, normalize=normalize)
    gk.fit_transform(train)
    gk.transform(test)

    # Attributed variant (parallel, n_jobs=-1).
    train, test = generate_dataset(n_graphs=50, r_vertices=(5, 10),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(1, 1), n_graphs_test=20,
                                   random_state=rs, features=('na', 5))
    gk = GraphKernel(kernel={"name": "SP", "as_attributes": True},
                     verbose=verbose, normalize=normalize, n_jobs=-1)
    gk.fit_transform(train)
    gk.transform(test)
def test_neighborhood_hash():
    """Picklability test for the Neighborhood Hash kernel."""
    data, _ = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                               r_connectivity=(0.4, 0.8),
                               r_weight_edges=(1, 1), n_graphs_test=40,
                               random_state=rs, features=('nl', 3))
    kernel = NeighborhoodHash(verbose=verbose, normalize=normalize)
    kernel.fit(data)
    assert is_picklable(kernel)
def test_lovasz_theta():
    """Picklability test for the Lovasz-theta distance kernel."""
    data, _ = generate_dataset(n_graphs=50, r_vertices=(5, 10),
                               r_connectivity=(0.4, 0.8),
                               r_weight_edges=(1, 1), n_graphs_test=20,
                               random_state=rs, features=None)
    kernel = LovaszTheta(verbose=verbose, normalize=normalize)
    kernel.fit(data)
    assert is_picklable(kernel)
def test_random_walk_labels():
    """Picklability test for the Labelled Random Walk kernel."""
    data, _ = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                               r_connectivity=(0.4, 0.8),
                               r_weight_edges=(0.01, 12.0),
                               n_graphs_test=40, random_state=rs,
                               features=('nl', 3))
    kernel = RandomWalkLabeled(verbose=verbose, normalize=normalize)
    kernel.fit(data)
    assert is_picklable(kernel)
def test_graph_hopper():
    """Picklability test for the Graph Hopper kernel."""
    data, _ = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                               r_connectivity=(0.4, 0.8),
                               r_weight_edges=(1, 1), n_graphs_test=40,
                               random_state=rs, features=('na', 4))
    kernel = GraphHopper(verbose=verbose, normalize=normalize)
    kernel.fit(data)
    assert is_picklable(kernel)
def test_svm_theta():
    """Picklability test for the SVM-theta distance kernel."""
    data, _ = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                               r_connectivity=(0.4, 0.8),
                               r_weight_edges=(1, 1), n_graphs_test=40,
                               random_state=rs, features=None)
    kernel = SvmTheta(verbose=verbose, normalize=normalize)
    kernel.fit(data)
    assert is_picklable(kernel)
def test_multiscale_laplacian():
    """Picklability test for the Multiscale Laplacian kernel."""
    # Initialise the kernel on a small attributed dataset.
    data, _ = generate_dataset(n_graphs=30, r_vertices=(5, 10),
                               r_connectivity=(0.4, 0.8),
                               r_weight_edges=(1, 1), n_graphs_test=10,
                               random_state=rs, features=('na', 5))
    kernel = MultiscaleLaplacian(verbose=verbose, normalize=normalize)
    kernel.fit(data)
    assert is_picklable(kernel)
def test_weisfeiler_lehman_optimal_assignment():
    """Picklability test for the Weisfeiler Lehman Optimal Assignment kernel."""
    # Docstring fixed: the function tests WeisfeilerLehmanOptimalAssignment,
    # not the plain Weisfeiler Lehman kernel.
    train, _ = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                r_connectivity=(0.4, 0.8),
                                r_weight_edges=(1, 1), n_graphs_test=40,
                                random_state=rs, features=('nl', 3))
    wl_oa_kernel = WeisfeilerLehmanOptimalAssignment(verbose=verbose,
                                                     normalize=normalize)
    wl_oa_kernel.fit(train)
    assert is_picklable(wl_oa_kernel)
def test_hadamard_code():
    """Picklability test for the Hadamard Code kernel."""
    data, _ = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                               r_connectivity=(0.4, 0.8),
                               r_weight_edges=(1, 1), n_graphs_test=40,
                               random_state=rs, features=('nl', 5))
    kernel = HadamardCode(verbose=verbose, normalize=normalize,
                          base_graph_kernel=VertexHistogram)
    kernel.fit(data)
    assert is_picklable(kernel)
def test_propagation():
    """Random input test for the Propagation kernel."""
    # No try/except in this test: exceptions propagate with full tracebacks
    # instead of being collapsed into `assert False, exception`.
    # Labelled variant.
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(float("1e-5"), 10),
                                   n_graphs_test=40, random_state=rs,
                                   features=('nl', 4))
    propagation_kernel = Propagation(verbose=verbose, normalize=normalize)
    propagation_kernel.fit_transform(train)
    propagation_kernel.transform(test)

    # Attributed variant.
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(float("1e-5"), 10),
                                   n_graphs_test=40, random_state=rs,
                                   features=('na', 5))
    propagation_kernel_attr = PropagationAttr(verbose=verbose,
                                              normalize=normalize)
    propagation_kernel_attr.fit_transform(train)
    propagation_kernel_attr.transform(test)
def test_random_walk():
    """Random input test for the Simple Random Walk kernel."""
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(0.01, 12.0),
                                   n_graphs_test=40, random_state=rs,
                                   features=None)
    rw_kernel = RandomWalk(verbose=verbose, normalize=normalize)
    # No try/except: exceptions propagate with full tracebacks instead of
    # being collapsed into `assert False, exception`.
    rw_kernel.fit_transform(train)
    rw_kernel.transform(test)
def test_graph_hopper():
    """Random input test for the Graph Hopper kernel."""
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(1, 1), n_graphs_test=40,
                                   random_state=rs, features=('na', 4))
    gh_kernel = GraphHopper(verbose=verbose, normalize=normalize)
    # No try/except: exceptions propagate with full tracebacks instead of
    # being collapsed into `assert False, exception`.
    gh_kernel.fit_transform(train)
    gh_kernel.transform(test)
def test_pyramid_match():
    """Picklability test for the Pyramid Match kernel [+ generic-wrapper]."""
    data, _ = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                               r_connectivity=(0.4, 0.8),
                               r_weight_edges=(1, 1), n_graphs_test=40,
                               random_state=rs, features=('nl', 3))
    # Direct kernel object and the equivalent generic-wrapper configuration.
    direct = PyramidMatch(verbose=verbose, normalize=normalize)
    wrapped = GraphKernel(kernel={"name": "pyramid_match"},
                          verbose=verbose, normalize=normalize)
    direct.fit(data)
    assert is_picklable(direct)
    wrapped.fit(data)
    assert is_picklable(wrapped)
def test_svm_theta_pd():
    """Random input test for the SVM-theta distance kernel [n_jobs=-1/generic-wrapper]."""
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(1, 1), n_graphs_test=40,
                                   random_state=rs, features=None)
    gk = GraphKernel(kernel="svm_theta", verbose=verbose,
                     normalize=normalize)
    # No try/except: exceptions propagate with full tracebacks instead of
    # being collapsed into `assert False, exception`.
    gk.fit_transform(train)
    gk.transform(test)
def test_svm_theta():
    """Random input test for the SVM-theta distance kernel."""
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(1, 1), n_graphs_test=40,
                                   random_state=rs, features=None)
    svm_kernel = SvmTheta(verbose=verbose, normalize=normalize)
    # No try/except: exceptions propagate with full tracebacks instead of
    # being collapsed into `assert False, exception`.
    svm_kernel.fit_transform(train)
    svm_kernel.transform(test)
def test_lovasz_theta():
    """Random input test for the Lovasz-theta distance kernel."""
    train, test = generate_dataset(n_graphs=50, r_vertices=(5, 10),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(1, 1), n_graphs_test=20,
                                   random_state=rs, features=None)
    lt_kernel = LovaszTheta(verbose=verbose, normalize=normalize)
    # No try/except: exceptions propagate with full tracebacks instead of
    # being collapsed into `assert False, exception`.
    lt_kernel.fit_transform(train)
    lt_kernel.transform(test)
def test_neighborhood_hash():
    """Random input test for the Neighborhood Hash kernel."""
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(1, 1), n_graphs_test=40,
                                   random_state=rs, features=('nl', 3))
    nh_kernel = NeighborhoodHash(verbose=verbose, normalize=normalize)
    # No try/except: exceptions propagate with full tracebacks instead of
    # being collapsed into `assert False, exception`.
    nh_kernel.fit_transform(train)
    nh_kernel.transform(test)
def test_weisfeiler_lehman_pd():
    """Random input test for the Weisfeiler Lehman kernel [n_jobs=-1/generic-wrapper]."""
    train, test = generate_dataset(n_graphs=100, r_vertices=(10, 20),
                                   r_connectivity=(0.4, 0.8),
                                   r_weight_edges=(1, 1), n_graphs_test=40,
                                   random_state=rs, features=('nl', 3))
    gk = GraphKernel(kernel="WL", verbose=verbose, normalize=normalize)
    # No try/except: exceptions propagate with full tracebacks instead of
    # being collapsed into `assert False, exception`.
    gk.fit_transform(train)
    gk.transform(test)