def test_addition_adaptive_4_kernels(self, adata: AnnData):
    """Adaptive (``^``) combination of four weighted kernels matches the manual variance-weighted formula."""
    weights = np.random.uniform(0, 10, 4)
    a, b, c, d = weights
    total = a + b + c + d

    vv = np.random.random(size=(adata.n_obs, adata.n_obs))
    cv = np.random.random(size=(adata.n_obs, adata.n_obs))
    adata.uns["velocity_variances"] = vv
    adata.uns["connectivity_variances"] = cv

    vk, ck = create_kernels(
        adata,
        velocity_variances="velocity_variances",
        connectivity_variances="connectivity_variances",
    )
    vk1, ck1 = create_kernels(
        adata,
        velocity_variances="velocity_variances",
        connectivity_variances="connectivity_variances",
    )

    combined = a * vk ^ b * ck ^ c * vk1 ^ d * ck1

    # expected: weights renormalized by their sum, each term scaled by its variance
    expected = _normalize(
        a / total * vv * vk.transition_matrix
        + b / total * cv * ck.transition_matrix
        + c / total * vv * vk1.transition_matrix
        + d / total * cv * ck1.transition_matrix
    )

    np.testing.assert_allclose(combined.transition_matrix.A, expected)
def _restich_tmaps(
    self,
    tmaps: Mapping[Tuple[float, float], AnnData],
    last_time_point: LastTimePoint = LastTimePoint.DIAGONAL,
    conn_kwargs: Mapping[str, Any] = MappingProxyType({}),
    normalize: bool = True,
) -> AnnData:
    """
    Stitch per-time-pair transport maps into one block matrix over all cells of :attr:`adata`.

    Parameters
    ----------
    tmaps
        Transport maps keyed by ``(source_time, target_time)``; each map's rows are
        source cells and columns are target cells.
    last_time_point
        How cells of the final time point transition (they have no outgoing map):
        self-loops, uniform, or via a connectivity kernel.
    conn_kwargs
        Keyword arguments for :func:`sc.pp.neighbors`, used only in the
        ``CONNECTIVITIES`` mode. ``density_normalize`` is popped and forwarded
        to the kernel instead.
    normalize
        Whether to row-normalize each transport map before stitching.

    Returns
    -------
    :class:`AnnData`
        Object whose ``X`` is the stitched transition matrix, reordered to
        match ``self.adata.obs_names`` on both axes, with per-cell metadata merged in.
    """
    from cellrank.tl.kernels import ConnectivityKernel

    # work on a mutable copy; force ``copy=False`` and drop ``key_added`` so the
    # neighbors call below operates in place on the temporary subset only
    conn_kwargs = dict(conn_kwargs)
    conn_kwargs["copy"] = False
    _ = conn_kwargs.pop("key_added", None)
    density_normalize = conn_kwargs.pop("density_normalize", True)

    # (len(tmaps) + 1)^2 block grid; block (i, i + 1) is the coupling from time
    # point i to i + 1, every other block stays None (implicit zero for bmat)
    blocks = [[None] * (len(tmaps) + 1) for _ in range(len(tmaps) + 1)]
    nrows, ncols = 0, 0
    obs_names, obs = [], []

    for i, tmap in enumerate(tmaps.values()):
        blocks[i][i + 1] = _normalize(tmap.X) if normalize else tmap.X
        nrows += tmap.n_obs
        ncols += tmap.n_vars
        obs_names.extend(tmap.obs_names)
        obs.append(tmap.obs)
    # cells of the last time point only ever appear as targets (columns);
    # NOTE(review): relies on ``tmap`` being the last map from the loop above
    obs_names.extend(tmap.var_names)

    # n = number of cells in the last time point
    n = self.adata.n_obs - nrows
    if last_time_point == LastTimePoint.DIAGONAL:
        # self-loops: last-time-point cells are absorbing
        blocks[-1][-1] = spdiags([1] * n, 0, n, n)
    elif last_time_point == LastTimePoint.UNIFORM:
        # uniform transition among last-time-point cells
        blocks[-1][-1] = np.ones((n, n)) / float(n)
    elif last_time_point == LastTimePoint.CONNECTIVITIES:
        # transitions via a connectivity kernel computed on the last-time-point subset
        adata_subset = self.adata[tmap.var_names].copy()
        sc.pp.neighbors(adata_subset, **conn_kwargs)
        blocks[-1][-1] = (
            ConnectivityKernel(adata_subset).compute_transition_matrix(
                density_normalize).transition_matrix)
    else:
        raise NotImplementedError(
            f"Last time point mode `{last_time_point}` is not yet implemented."
        )

    # prevent the last block from disappearing
    n = blocks[0][1].shape[0]
    blocks[0][0] = spdiags([], 0, n, n)

    tmp = AnnData(bmat(blocks, format="csr"))
    tmp.obs_names = obs_names
    tmp.var_names = obs_names
    # reorder both axes to match ``self.adata`` and reattach per-cell metadata
    tmp = tmp[self.adata.obs_names, :][:, self.adata.obs_names]
    tmp.obs = pd.merge(
        tmp.obs,
        pd.concat(obs),
        left_index=True,
        right_index=True,
        how="left",
    )

    return tmp
def test_palantir(self, adata: AnnData):
    """PalantirKernel without density normalization equals the biased-kNN reference matrix."""
    conn = _get_neighs(adata, "connectivities")
    n_neighbors = _get_neighs_params(adata)["n_neighbors"]
    pseudotime = adata.obs["latent_time"]

    # reference: bias the kNN graph along pseudotime, then row-normalize
    reference = _normalize(bias_knn(conn, pseudotime, n_neighbors))

    kernel = PalantirKernel(adata, time_key="latent_time")
    kernel.compute_transition_matrix(density_normalize=False)

    np.testing.assert_allclose(reference.A, kernel.transition_matrix.A, rtol=_rtol)
def transition_matrix(self, value: Union[np.ndarray, spmatrix]) -> None:
    """
    Set a new value of the transition matrix.

    Parameters
    ----------
    value
        The new transition matrix. If the expression has no parent, the matrix is normalized, if needed.

    Returns
    -------
    None
        Nothing, just updates the :attr:`transition_matrix` and optionally normalizes it.
    """
    # use `not` instead of `~`: `~` on a plain Python `bool` is bitwise
    # (``~True == -2``, which is truthy), so it would silently invert the
    # check if `.all()` ever returned a builtin bool; `not` is correct for
    # both ``bool`` and ``np.bool_``
    should_norm = not np.isclose(value.sum(1), 1.0, rtol=_RTOL).all()
    if self._parent is None:
        self._transition_matrix = _normalize(value) if should_norm else value
    else:
        # it's AND, not OR, because of combinations
        self._transition_matrix = (
            _normalize(value) if self._normalize and should_norm else value
        )
def test_addition_adaptive(self, adata: AnnData):
    """Unweighted adaptive combination (``vk ^ ck``) uses equal 0.5 weights with variance scaling."""
    n = adata.n_obs
    vv = np.random.random(size=(n, n))
    cv = np.random.random(size=(n, n))
    adata.uns["velocity_variances"] = vv
    adata.uns["connectivity_variances"] = cv

    vk, ck = create_kernels(
        adata,
        velocity_variances="velocity_variances",
        connectivity_variances="connectivity_variances",
    )

    combined = vk ^ ck
    expected = _normalize(
        0.5 * vv * vk.transition_matrix + 0.5 * cv * ck.transition_matrix
    )

    np.testing.assert_allclose(combined.transition_matrix.A, expected)
def test_transition_probabilities_bwd(self, adata: AnnData):
    """Backward transition probabilities from cellrank agree with the scvelo formulation."""
    softmax_scale = 3

    vk = VelocityKernel(adata, backward=True)
    vk.compute_transition_matrix(softmax_scale=softmax_scale, mode="deterministic")
    T_cr = vk.transition_matrix

    # reference: exp(pearson * scale), built via expm1 + 1 to keep sparsity
    T_exp = np.expm1(vk.pearson_correlations * softmax_scale)
    T_exp.data += 1
    T_exp = _normalize(T_exp)

    # compare dense matrices; don't use .data, it can be reordered
    np.testing.assert_allclose(T_exp.A, T_cr.A)
def test_addition_adaptive_wrong_variances(self, adata: AnnData):
    """Dropping the variance scaling must NOT reproduce the adaptive combination (negative test)."""
    a, b = np.random.uniform(0, 10, 2)
    total = a + b

    adata.uns["velocity_variances"] = np.random.random(
        size=(adata.n_obs, adata.n_obs)
    )
    adata.uns["connectivity_variances"] = np.random.random(
        size=(adata.n_obs, adata.n_obs)
    )

    vk, ck = create_kernels(
        adata,
        velocity_variances="velocity_variances",
        connectivity_variances="connectivity_variances",
    )

    combined = a * vk ^ b * ck
    # intentionally wrong: omits the per-kernel variance weighting
    wrong = _normalize(
        a / total * vk.transition_matrix + b / total * ck.transition_matrix
    )

    assert not np.allclose(combined.transition_matrix.A, wrong.A)