def test_estimate_softmax_scale(self, adata: AnnData):
    """Passing ``softmax_scale=None`` triggers auto-estimation to a float."""
    kernel = VelocityKernel(adata)
    kernel.compute_transition_matrix(
        mode="deterministic", show_progress_bar=False, softmax_scale=None
    )
    estimated = kernel.params["softmax_scale"]
    assert isinstance(estimated, float)
def test_custom_function(self, adata: AnnData, backward: bool):
    """A custom similarity scheme is accepted and recorded in ``params``."""
    kernel = VelocityKernel(adata, backward=backward)
    kernel.compute_transition_matrix(
        mode="deterministic", softmax_scale=4, scheme=CustomFuncHessian()
    )
    assert kernel.params["scheme"] == str(CustomFuncHessian())
def create_kernels(
    adata: AnnData,
    velocity_variances: Optional[str] = None,
    connectivity_variances: Optional[str] = None,
) -> Tuple[VelocityKernel, ConnectivityKernel]:
    """Build a velocity and a connectivity kernel with fixed transition matrices.

    Matrix scalers are taken from ``adata.uns`` when the given key exists,
    otherwise a random Gaussian matrix is used. The velocity kernel gets the
    identity as its transition matrix; the connectivity kernel gets a
    row-stochastic diagonal + upper-diagonal matrix.
    """
    n = adata.n_obs

    vk = VelocityKernel(adata)
    vk._mat_scaler = adata.uns.get(
        velocity_variances, np.random.normal(size=(n, n))
    )

    ck = ConnectivityKernel(adata)
    ck._mat_scaler = adata.uns.get(
        connectivity_variances, np.random.normal(size=(n, n))
    )

    vk._transition_matrix = csr_matrix(np.eye(n))

    dense = np.eye(n, k=1) / 2 + np.eye(n) / 2
    dense[-1, -1] = 1  # last row has no upper neighbor; keep it stochastic
    ck._transition_matrix = csr_matrix(dense)
    # sanity check: every row of the connectivity kernel sums to 1
    np.testing.assert_allclose(np.sum(ck._transition_matrix.A, axis=1), 1)

    return vk, ck
def test_write_adata_key(self, adata: AnnData):
    """Writing under a custom key stores both matrix and params in ``adata``."""
    kernel = VelocityKernel(adata).compute_transition_matrix(softmax_scale=4)
    kernel.write_to_adata(key="foo")

    assert adata is kernel.adata
    assert "foo_params" in adata.uns.keys()
    np.testing.assert_array_equal(
        adata.obsp["foo"].toarray(), kernel.transition_matrix.toarray()
    )
def test_precomputed_adata_origin(self, adata: AnnData):
    """A precomputed kernel records which ``obsp`` slot it was read from."""
    source = VelocityKernel(adata).compute_transition_matrix(
        mode="stochastic", softmax_scale=4
    )
    source.write_to_adata("foo")

    precomputed = PrecomputedKernel("foo", adata=adata)
    assert precomputed._origin == "adata.obsp['foo']"
def test_custom_function_stochastic_no_hessian(self, adata: AnnData):
    """A Hessian-less scheme makes stochastic mode fall back to Monte Carlo."""
    kernel = VelocityKernel(adata)
    kernel.compute_transition_matrix(
        mode="stochastic", scheme=CustomFunc(), softmax_scale=4, n_samples=10
    )

    assert kernel.params["mode"] == "monte_carlo"
    assert kernel.params["scheme"] == str(CustomFunc())
def test_copy_velocity_kernel(self, adata: AnnData):
    """Copying duplicates the matrix, logits, params, and direction."""
    original = VelocityKernel(adata).compute_transition_matrix(softmax_scale=4)
    clone = original.copy()

    np.testing.assert_array_equal(
        original.transition_matrix.A, clone.transition_matrix.A
    )
    np.testing.assert_array_equal(original.logits.A, clone.logits.A)

    assert original.params == clone.params
    assert original.backward == clone.backward
def test_constant_wrong_parentheses(self, adata: AnnData):
    """Normalization yields equal 1/3 weights regardless of how terms are grouped."""
    combined = VelocityKernel(adata).compute_transition_matrix(
        softmax_scale=4) + (
        ConnectivityKernel(adata).compute_transition_matrix()
        + ConnectivityKernel(adata).compute_transition_matrix())
    combined.compute_transition_matrix()

    # each of the three kernels ends up with the same constant weight
    for idx in range(3):
        constant = _is_bin_mult(combined[idx])
        assert constant.transition_matrix == 1 / 3
def test_bwd_pipelne_cflare(self, adata: AnnData):
    """Smoke test: the full backward CFLARE pipeline runs and writes all keys."""
    # NOTE(review): "pipelne" looks like a typo for "pipeline"; left unchanged
    # so the collected test id stays stable.
    vk = VelocityKernel(
        adata, backward=True).compute_transition_matrix(softmax_scale=4)
    ck = ConnectivityKernel(adata, backward=True).compute_transition_matrix()
    # weighted combination of velocity and connectivity information
    final_kernel = 0.8 * vk + 0.2 * ck

    estimator_bwd = cr.tl.estimators.CFLARE(final_kernel)

    estimator_bwd.compute_partition()

    estimator_bwd.compute_eigendecomposition()
    estimator_bwd.plot_spectrum()
    estimator_bwd.plot_spectrum(real_only=True)
    estimator_bwd.plot_eigendecomposition()
    estimator_bwd.plot_eigendecomposition(left=False)

    estimator_bwd.compute_terminal_states(use=1)
    estimator_bwd.plot_terminal_states()

    estimator_bwd.compute_absorption_probabilities()
    estimator_bwd.plot_absorption_probabilities()

    estimator_bwd.compute_lineage_drivers(cluster_key="clusters", use_raw=False)

    # verify that every expected backward key was written to ``adata``
    _assert_has_all_keys(adata, Direction.BACKWARD)
def test_compute_schur_write_eigvals_similar_to_orig_eigdecomp(
        self, adata_large: AnnData):
    """Eigenvalues written by the Schur decomposition match the direct ones."""
    vk = VelocityKernel(adata_large).compute_transition_matrix(
        softmax_scale=4)
    ck = ConnectivityKernel(adata_large).compute_transition_matrix()
    final_kernel = 0.8 * vk + 0.2 * ck

    mc = cr.tl.estimators.GPCCA(final_kernel)
    mc.compute_eigendecomposition(k=10, only_evals=True)

    _check_eigdecomposition(mc)
    # keep a deep copy of the original decomposition for later comparison
    orig_ed = deepcopy(mc._get(P.EIG))

    # clear the cached decomposition so compute_schur has to re-populate it
    mc._set(A.EIG, None)
    mc.compute_schur(n_components=10, method="krylov")

    _check_eigdecomposition(mc)
    schur_ed = mc._get(P.EIG)

    assert orig_ed.keys() == schur_ed.keys()
    assert orig_ed["eigengap"] == schur_ed["eigengap"]
    # both ran with k=10; min() guards against either storing fewer values
    n = min(orig_ed["params"]["k"], schur_ed["params"]["k"])
    np.testing.assert_array_almost_equal(orig_ed["D"].real[:n],
                                         schur_ed["D"].real[:n])
    np.testing.assert_array_almost_equal(
        np.abs(orig_ed["D"].imag[:n]),
        np.abs(schur_ed["D"].imag[:n]))  # complex conj.
def _create_cflare(*, backward: bool = False) -> Tuple[AnnData, CFLARE]:
    """Run the full CFLARE pipeline on a copy of the medium test dataset.

    Parameters
    ----------
    backward
        Whether to run the backward process.

    Returns
    -------
    The annotated data object and the fitted estimator.
    """
    adata = _adata_medium.copy()

    sc.tl.paga(adata, groups="clusters")

    vk = VelocityKernel(adata, backward=backward).compute_transition_matrix(
        softmax_scale=4
    )
    ck = ConnectivityKernel(adata, backward=backward).compute_transition_matrix()
    # weighted combination of velocity and connectivity information
    final_kernel = 0.8 * vk + 0.2 * ck

    mc = CFLARE(final_kernel)

    mc.compute_partition()
    mc.compute_eigendecomposition()
    mc.compute_final_states(use=2)
    mc.compute_absorption_probabilities(use_petsc=False)
    mc.compute_lineage_drivers(cluster_key="clusters", use_raw=False)

    assert adata is mc.adata
    if backward:
        assert str(AbsProbKey.BACKWARD) in adata.obsm
    else:
        assert str(AbsProbKey.FORWARD) in adata.obsm
    # rows of the absorption probabilities must sum to one
    np.testing.assert_array_almost_equal(mc.absorption_probabilities.sum(1), 1)

    return adata, mc
def test_compute_absorption_probabilities_solver_petsc(
        self, adata_large: AnnData):
    """scipy's GMRES and PETSc's GMRES agree on absorption probabilities."""
    velo = VelocityKernel(adata_large).compute_transition_matrix(
        softmax_scale=4)
    conn = ConnectivityKernel(adata_large).compute_transition_matrix()
    combined = 0.8 * velo + 0.2 * conn
    tol = 1e-6

    estimator = cr.tl.estimators.CFLARE(combined)
    estimator.compute_eigendecomposition(k=5)
    estimator.compute_final_states(use=2)

    # solve with scipy's iterative GMRES solver
    estimator.compute_absorption_probabilities(
        solver="gmres", use_petsc=False, tol=tol)
    probs_scipy = estimator._get(P.ABS_PROBS).copy()

    # solve again with PETSc's GMRES implementation
    estimator.compute_absorption_probabilities(
        solver="gmres", use_petsc=True, tol=tol)
    probs_petsc = estimator._get(P.ABS_PROBS).copy()

    # sanity check: the two results are distinct buffers
    assert not np.shares_memory(probs_scipy.X, probs_petsc.X)
    np.testing.assert_allclose(probs_scipy.X, probs_petsc.X, rtol=0, atol=tol)
def test_compute_absorption_probabilities_normal_run(
        self, adata_large: AnnData):
    """Absorption probabilities are computed and mirrored into ``adata``."""
    vk = VelocityKernel(adata_large).compute_transition_matrix(
        softmax_scale=4)
    ck = ConnectivityKernel(adata_large).compute_transition_matrix()
    final_kernel = 0.8 * vk + 0.2 * ck

    mc = cr.tl.estimators.CFLARE(final_kernel)
    mc.compute_eigendecomposition(k=5)
    mc.compute_final_states(use=2)
    mc.compute_absorption_probabilities()

    # differentiation potential is stored as a Series and in ``adata.obs``
    assert isinstance(mc._get(P.DIFF_POT), pd.Series)
    assert f"{AbsProbKey.FORWARD}_dp" in mc.adata.obs.keys()
    np.testing.assert_array_equal(mc._get(P.DIFF_POT),
                                  mc.adata.obs[f"{AbsProbKey.FORWARD}_dp"])

    # a Lineage object with one column per final state, mirrored to ``obsm``
    assert isinstance(mc._get(P.ABS_PROBS), cr.tl.Lineage)
    assert mc._get(P.ABS_PROBS).shape == (mc.adata.n_obs, 2)
    assert f"{AbsProbKey.FORWARD}" in mc.adata.obsm.keys()
    np.testing.assert_array_equal(
        mc._get(P.ABS_PROBS).X, mc.adata.obsm[f"{AbsProbKey.FORWARD}"])

    # lineage names and colors are mirrored to ``adata.uns``
    assert _lin_names(AbsProbKey.FORWARD) in mc.adata.uns.keys()
    np.testing.assert_array_equal(
        mc._get(P.ABS_PROBS).names,
        mc.adata.uns[_lin_names(AbsProbKey.FORWARD)],
    )

    assert _colors(AbsProbKey.FORWARD) in mc.adata.uns.keys()
    np.testing.assert_array_equal(
        mc._get(P.ABS_PROBS).colors,
        mc.adata.uns[_colors(AbsProbKey.FORWARD)],
    )
    # each cell's probabilities sum to one
    np.testing.assert_allclose(mc._get(P.ABS_PROBS).X.sum(1), 1)
def test_compute_priming_clusters(self, adata_large: AnnData):
    """Lineage priming agrees whether early cells are given by cluster
    name or by boolean mask, and is written back to ``adata.obs``."""
    vk = VelocityKernel(adata_large).compute_transition_matrix(
        softmax_scale=4)
    ck = ConnectivityKernel(adata_large).compute_transition_matrix()
    terminal_kernel = 0.8 * vk + 0.2 * ck

    mc = cr.tl.estimators.GPCCA(terminal_kernel)
    mc.compute_schur(n_components=10, method="krylov")
    mc.compute_macrostates(n_states=2)
    mc.set_terminal_states_from_macrostates()
    mc.compute_absorption_probabilities()

    cat = adata_large.obs["clusters"].cat.categories[0]
    # early cells given as a cluster name …
    deg1 = mc.compute_lineage_priming(method="kl_divergence",
                                      early_cells={"clusters": [cat]})
    # … and as the equivalent boolean mask; results must be identical
    deg2 = mc.compute_lineage_priming(
        method="kl_divergence",
        early_cells=(adata_large.obs["clusters"] == cat).values,
    )

    assert_series_equal(deg1, deg2)
    # because passing it to a dataframe changes its name
    assert_series_equal(adata_large.obs[_pd(mc._abs_prob_key)],
                        deg1,
                        check_names=False)
    assert_series_equal(mc._get(A.PRIME_DEG), deg1)
def test_compute_initial_states_from_forward_normal_run(
        self, adata_large: AnnData):
    """Initial states derived from a forward process are written under the
    backward key without adding anything to ``adata.obsm``."""
    vk = VelocityKernel(
        adata_large, backward=False).compute_transition_matrix(softmax_scale=4)
    ck = ConnectivityKernel(adata_large, backward=False).compute_transition_matrix()
    terminal_kernel = 0.8 * vk + 0.2 * ck

    mc = cr.tl.estimators.GPCCA(terminal_kernel)
    mc.compute_schur(n_components=10, method="krylov")
    mc.compute_macrostates(n_states=2, n_cells=5)
    # snapshot of the obsm keys to prove nothing is added later
    obsm_keys = set(mc.adata.obsm.keys())
    # the macrostate with the smallest coarse stationary mass is expected
    # to be selected as the initial state
    expected = mc._get(P.COARSE_STAT_D).index[np.argmin(
        mc._get(P.COARSE_STAT_D))]

    mc._compute_initial_states(1)

    key = TermStatesKey.BACKWARD.s

    assert key in mc.adata.obs
    np.testing.assert_array_equal(mc.adata.obs[key].cat.categories, [expected])
    assert _probs(key) in mc.adata.obs
    assert _colors(key) in mc.adata.uns
    assert _lin_names(key) in mc.adata.uns
    # make sure that we don't write anything there - it's useless
    assert set(mc.adata.obsm.keys()) == obsm_keys
def _create_gpcca(*, backward: bool = False) -> Tuple[AnnData, GPCCA]:
    """Run the full GPCCA pipeline on a copy of the medium test dataset.

    Parameters
    ----------
    backward
        Whether to run the backward process.

    Returns
    -------
    The annotated data object and the fitted estimator.
    """
    adata = _adata_medium.copy()

    sc.tl.paga(adata, groups="clusters")

    vk = VelocityKernel(
        adata, backward=backward).compute_transition_matrix(softmax_scale=4)
    ck = ConnectivityKernel(adata, backward=backward).compute_transition_matrix()
    # weighted combination of velocity and connectivity information
    final_kernel = 0.8 * vk + 0.2 * ck

    mc = GPCCA(final_kernel)

    mc.compute_partition()
    mc.compute_eigendecomposition()
    mc.compute_schur(method="krylov")
    mc.compute_macrostates(n_states=2)
    mc.set_terminal_states_from_macrostates()
    mc.compute_absorption_probabilities()
    mc.compute_lineage_drivers(cluster_key="clusters", use_raw=False)

    assert adata is mc.adata
    if backward:
        assert str(AbsProbKey.BACKWARD) in adata.obsm
    else:
        assert str(AbsProbKey.FORWARD) in adata.obsm
    # rows of the absorption probabilities must sum to one
    np.testing.assert_allclose(mc.absorption_probabilities.X.sum(1),
                               1.0,
                               rtol=1e-6)

    return adata, mc
def test_str_repr_equiv_no_transition_matrix(self, adata: AnnData):
    """Before computing a matrix, str() and repr() agree and are minimal."""
    kernel = VelocityKernel(adata)

    as_str = str(kernel)
    as_repr = repr(kernel)

    assert as_str == as_repr
    assert as_str == "<Velo>"
def test_inversion_propagation(self, adata: AnnData):
    """Inverting a combined kernel flips the direction of its children too."""
    conn = ConnectivityKernel(adata, backward=False)
    velo = VelocityKernel(adata, backward=False)
    inverted = ~(conn + velo)

    for kernel in (conn, velo, inverted):
        assert kernel.backward
def test_transition_probabilities_bwd(self, adata: AnnData):
    """Backward transition probabilities match scvelo's softmax construction."""
    scale = 3

    # compute transition probabilities using cellrank
    kernel = VelocityKernel(adata, backward=True)
    kernel.compute_transition_matrix(softmax_scale=scale, mode="deterministic")
    computed = kernel.transition_matrix

    # rebuild the expected matrix directly from the Pearson correlations:
    # exp(corr * scale), row-normalized
    expected = np.expm1(kernel.pearson_correlations * scale)
    expected.data += 1
    expected = _normalize(expected)

    # compare dense forms; don't use .data, it can be reordered
    np.testing.assert_allclose(expected.A, computed.A)
def test_kernels_multiple_constant(self, adata: AnnData):
    """Constant factors do not change which kernels are registered."""
    velo = VelocityKernel(adata)
    conn = ConnectivityKernel(adata)
    combined = 100 * velo + 42 * conn

    assert len(combined.kernels) == 2
    assert velo in combined.kernels
    assert conn in combined.kernels
def test_kernels_multiple(self, adata: AnnData):
    """Adding two kernels registers both of them on the combined kernel."""
    velo = VelocityKernel(adata)
    conn = ConnectivityKernel(adata)
    combined = velo + conn

    assert len(combined.kernels) == 2
    assert velo in combined.kernels
    assert conn in combined.kernels
def test_parent(self, adata: AnnData):
    """Addition wraps each child in an invisible constant node under the root."""
    velo = VelocityKernel(adata)
    conn = ConnectivityKernel(adata)
    combined = velo + conn

    # one hop up is the invisible constant-multiplication node,
    # two hops up is the combined kernel itself
    assert velo._parent._parent is combined
    assert conn._parent._parent is combined
    assert combined._parent is None
def test_addition_3_kernels(self, adata: AnnData):
    """Adding three kernels averages their transition matrices equally.

    ``create_kernels`` provides an identity velocity kernel and a
    diagonal + upper-diagonal connectivity kernel; a third kernel with a
    diagonal + lower-diagonal matrix is added, and the combined matrix
    should be the uniform (1/3 each) mixture of all three.
    """
    vk, ck = create_kernels(adata)  # diagonal + upper diag
    # third kernel: diagonal + lower diagonal
    vk1 = VelocityKernel(adata)
    vk1._transition_matrix = np.eye(adata.n_obs, k=-1) / 2 + np.eye(
        adata.n_obs) / 2
    vk1._transition_matrix[0, 0] = 1  # first row has no lower neighbor
    # sanity check: connectivity kernel rows sum to one
    np.testing.assert_allclose(np.sum(ck._transition_matrix, axis=1), 1)

    k = (vk + ck + vk1).compute_transition_matrix()
    # fixed: second term used the private ``adata._n_obs``; use the public
    # ``adata.n_obs`` accessor consistently
    expected = (np.eye(adata.n_obs) * (1 / 3 + 1 / 6 + 1 / 6) +
                np.eye(adata.n_obs, k=1) * 1 / 6 +
                np.eye(adata.n_obs, k=-1) * 1 / 6)
    # boundary rows: the off-diagonal mass folds back onto the diagonal
    expected[0, 0] = expected[-1, -1] = 2 / 3 + 1 / 3 * 0.5
    expected[0, 1] = expected[-1, -2] = 1 - expected[0, 0]

    np.testing.assert_allclose(k.transition_matrix.A, expected)
def test_not_none_transition_matrix_accessor(self, adata: AnnData):
    """The transition-matrix accessor is populated even before compute()."""
    kernels = (
        VelocityKernel(adata),
        ConnectivityKernel(adata),
        PalantirKernel(adata, time_key="latent_time"),
    )
    for kernel in kernels:
        assert kernel.transition_matrix is not None
def test_compute_macrostates_1_state_no_eig(self, adata_large: AnnData):
    """Requesting one macrostate works without a prior eigendecomposition."""
    velo = VelocityKernel(adata_large).compute_transition_matrix(
        softmax_scale=4)
    conn = ConnectivityKernel(adata_large).compute_transition_matrix()
    estimator = cr.tl.estimators.GPCCA(0.8 * velo + 0.2 * conn)

    estimator.compute_macrostates(n_states=1)
def test_compute_terminal_states_no_eig(self, adata_large: AnnData):
    """Terminal-state computation without an eigendecomposition must raise."""
    velo = VelocityKernel(adata_large).compute_transition_matrix(softmax_scale=4)
    conn = ConnectivityKernel(adata_large).compute_transition_matrix()
    estimator = cr.tl.estimators.CFLARE(0.8 * velo + 0.2 * conn)

    with pytest.raises(RuntimeError):
        estimator.compute_terminal_states(use=2)
def test_pearson_correlations_fwd(self, adata: AnnData):
    """Forward Pearson correlations match the graph stored by scvelo."""
    # scvelo stores positive and negative parts of the graph separately
    velo_graph = adata.uns["velocity_graph"] + adata.uns[
        "velocity_graph_neg"]

    # compute pearson correlations using cellrank
    kernel = VelocityKernel(adata, backward=False)
    kernel.compute_transition_matrix(mode="deterministic", softmax_scale=4)
    correlations = kernel.pearson_correlations

    # restrict cellrank's values to scvelo's sparsity pattern
    recovered = velo_graph.copy()
    recovered.data = np.array(correlations[(velo_graph != 0)]).squeeze()

    assert np.max(np.abs((recovered - velo_graph).data)) < _rtol
def test_not_none_transition_matrix_compute(self, adata: AnnData):
    """After compute_transition_matrix(), every kernel exposes a matrix."""
    kernels = (
        VelocityKernel(adata).compute_transition_matrix(softmax_scale=4),
        ConnectivityKernel(adata).compute_transition_matrix(),
        PalantirKernel(
            adata, time_key="latent_time").compute_transition_matrix(),
    )
    for kernel in kernels:
        assert kernel.transition_matrix is not None
def test_constant_normalize_2(self, adata: AnnData):
    """Explicit constants 9 and 1 are normalized to 9/10 and 1/10."""
    combined = (
        9 * VelocityKernel(adata).compute_transition_matrix(softmax_scale=4)
        + 1 * ConnectivityKernel(adata).compute_transition_matrix()
    )
    combined.compute_transition_matrix()

    first, second = _is_bin_mult(combined[0]), _is_bin_mult(combined[1])
    assert first.transition_matrix == 9 / 10
    assert second.transition_matrix == 1 / 10
def test_compute_metastable_states_no_eig(self, adata_large: AnnData):
    """Metastable-state computation without an eigendecomposition must raise."""
    velo = VelocityKernel(adata_large).compute_transition_matrix(
        softmax_scale=4)
    conn = ConnectivityKernel(adata_large).compute_transition_matrix()
    estimator = cr.tl.estimators.GPCCA(0.8 * velo + 0.2 * conn)

    with pytest.raises(RuntimeError):
        estimator.compute_metastable_states(n_states=None)