Example #1
    def test_constant_normalize_3(self, adata: AnnData):
        k = (VelocityKernel(adata).compute_transition_matrix(softmax_scale=4) +
             ConnectivityKernel(adata).compute_transition_matrix() +
             ConnectivityKernel(adata).compute_transition_matrix())
        k.compute_transition_matrix()
        c1, c2, c3 = _is_bin_mult(k[0]), _is_bin_mult(k[1]), _is_bin_mult(k[2])

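        # with no explicit weights, each of the three kernels is assigned the constant 1 / 3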
        assert c1.transition_matrix == 1 / 3
        assert c2.transition_matrix == 1 / 3
        assert c3.transition_matrix == 1 / 3
Example #2
    def test_adaptive_kernel_constants(self, adata: AnnData):
        ck1 = ConnectivityKernel(adata).compute_transition_matrix()
        ck1._mat_scaler = np.random.normal(size=(adata.n_obs, adata.n_obs))

        ck2 = ConnectivityKernel(adata).compute_transition_matrix()
        ck2._mat_scaler = np.random.normal(size=(adata.n_obs, adata.n_obs))

        k = (3 * ck1) ^ (1 * ck2)
        k.compute_transition_matrix()

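        # the constants 3 and 1 are normalized to 3 / 4 and 1 / 4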
        assert k[0][0]._value == 3 / 4
        assert k[1][0]._value == 1 / 4
Example #3
    def test_constant_correct_parentheses(self, adata: AnnData):
        k = 1 * VelocityKernel(adata).compute_transition_matrix(
            softmax_scale=4) + 1 * (
                ConnectivityKernel(adata).compute_transition_matrix() +
                ConnectivityKernel(adata).compute_transition_matrix())
        k.compute_transition_matrix()
        c1, c2, c3 = (
            _is_bin_mult(k[0]),
            _is_bin_mult(k[1][1][0]),
            _is_bin_mult(k[1][1][1]),
        )

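        # the two outer constants normalize to 1 / 2 each; within the parenthesized sum,
        # each connectivity kernel is again weighted 1 / 2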
        assert c1.transition_matrix == 1 / 2
        assert c2.transition_matrix == 1 / 2
        assert c3.transition_matrix == 1 / 2
Example #4
    def test_compute_absorption_probabilities_solver_petsc(
            self, adata_large: AnnData):
        vk = VelocityKernel(adata_large).compute_transition_matrix(
            softmax_scale=4)
        ck = ConnectivityKernel(adata_large).compute_transition_matrix()
        final_kernel = 0.8 * vk + 0.2 * ck
        tol = 1e-6

        mc = cr.tl.estimators.CFLARE(final_kernel)
        mc.compute_eigendecomposition(k=5)
        mc.compute_final_states(use=2)

        # compute lin probs using scipy's (iterative) GMRES solver
        mc.compute_absorption_probabilities(solver="gmres",
                                            use_petsc=False,
                                            tol=tol)
        l_iter = mc._get(P.ABS_PROBS).copy()

        # compute lin probs using petsc iterative solver
        mc.compute_absorption_probabilities(solver="gmres",
                                            use_petsc=True,
                                            tol=tol)
        l_iter_petsc = mc._get(P.ABS_PROBS).copy()

        assert not np.shares_memory(l_iter.X, l_iter_petsc.X)  # sanity check
        np.testing.assert_allclose(l_iter.X, l_iter_petsc.X, rtol=0, atol=tol)
Example #5
    def test_compute_absorption_probabilities_normal_run(
            self, adata_large: AnnData):
        vk = VelocityKernel(adata_large).compute_transition_matrix(
            softmax_scale=4)
        ck = ConnectivityKernel(adata_large).compute_transition_matrix()
        final_kernel = 0.8 * vk + 0.2 * ck

        mc = cr.tl.estimators.CFLARE(final_kernel)
        mc.compute_eigendecomposition(k=5)
        mc.compute_final_states(use=2)
        mc.compute_absorption_probabilities()

        assert isinstance(mc._get(P.DIFF_POT), pd.Series)
        assert f"{AbsProbKey.FORWARD}_dp" in mc.adata.obs.keys()
        np.testing.assert_array_equal(mc._get(P.DIFF_POT),
                                      mc.adata.obs[f"{AbsProbKey.FORWARD}_dp"])

        assert isinstance(mc._get(P.ABS_PROBS), cr.tl.Lineage)
        assert mc._get(P.ABS_PROBS).shape == (mc.adata.n_obs, 2)
        assert f"{AbsProbKey.FORWARD}" in mc.adata.obsm.keys()
        np.testing.assert_array_equal(
            mc._get(P.ABS_PROBS).X, mc.adata.obsm[f"{AbsProbKey.FORWARD}"])

        assert _lin_names(AbsProbKey.FORWARD) in mc.adata.uns.keys()
        np.testing.assert_array_equal(
            mc._get(P.ABS_PROBS).names,
            mc.adata.uns[_lin_names(AbsProbKey.FORWARD)],
        )

        assert _colors(AbsProbKey.FORWARD) in mc.adata.uns.keys()
        np.testing.assert_array_equal(
            mc._get(P.ABS_PROBS).colors,
            mc.adata.uns[_colors(AbsProbKey.FORWARD)],
        )
        np.testing.assert_allclose(mc._get(P.ABS_PROBS).X.sum(1), 1)
Example #6
    def test_compute_priming_clusters(self, adata_large: AnnData):
        vk = VelocityKernel(adata_large).compute_transition_matrix(
            softmax_scale=4)
        ck = ConnectivityKernel(adata_large).compute_transition_matrix()
        terminal_kernel = 0.8 * vk + 0.2 * ck

        mc = cr.tl.estimators.GPCCA(terminal_kernel)
        mc.compute_schur(n_components=10, method="krylov")
        mc.compute_macrostates(n_states=2)
        mc.set_terminal_states_from_macrostates()
        mc.compute_absorption_probabilities()

        cat = adata_large.obs["clusters"].cat.categories[0]
        deg1 = mc.compute_lineage_priming(method="kl_divergence",
                                          early_cells={"clusters": [cat]})
        deg2 = mc.compute_lineage_priming(
            method="kl_divergence",
            early_cells=(adata_large.obs["clusters"] == cat).values,
        )

        assert_series_equal(deg1, deg2)
        # check_names=False because storing the Series in adata.obs changes its name
        assert_series_equal(adata_large.obs[_pd(mc._abs_prob_key)],
                            deg1,
                            check_names=False)
        assert_series_equal(mc._get(A.PRIME_DEG), deg1)
Example #7
    def test_compute_initial_states_from_forward_normal_run(
            self, adata_large: AnnData):
        vk = VelocityKernel(
            adata_large,
            backward=False).compute_transition_matrix(softmax_scale=4)
        ck = ConnectivityKernel(adata_large,
                                backward=False).compute_transition_matrix()
        terminal_kernel = 0.8 * vk + 0.2 * ck

        mc = cr.tl.estimators.GPCCA(terminal_kernel)
        mc.compute_schur(n_components=10, method="krylov")

        mc.compute_macrostates(n_states=2, n_cells=5)
        obsm_keys = set(mc.adata.obsm.keys())
        expected = mc._get(P.COARSE_STAT_D).index[np.argmin(
            mc._get(P.COARSE_STAT_D))]

        mc._compute_initial_states(1)

        key = TermStatesKey.BACKWARD.s

        assert key in mc.adata.obs
        np.testing.assert_array_equal(mc.adata.obs[key].cat.categories,
                                      [expected])
        assert _probs(key) in mc.adata.obs
        assert _colors(key) in mc.adata.uns
        assert _lin_names(key) in mc.adata.uns

        # make sure that nothing extra was written to adata.obsm - it is not needed here
        assert set(mc.adata.obsm.keys()) == obsm_keys
Example #8
    def test_bwd_pipelne_cflare(self, adata: AnnData):
        vk = VelocityKernel(
            adata, backward=True).compute_transition_matrix(softmax_scale=4)
        ck = ConnectivityKernel(adata,
                                backward=True).compute_transition_matrix()
        final_kernel = 0.8 * vk + 0.2 * ck

        estimator_bwd = cr.tl.estimators.CFLARE(final_kernel)

        estimator_bwd.compute_partition()

        estimator_bwd.compute_eigendecomposition()
        estimator_bwd.plot_spectrum()
        estimator_bwd.plot_spectrum(real_only=True)
        estimator_bwd.plot_eigendecomposition()
        estimator_bwd.plot_eigendecomposition(left=False)

        estimator_bwd.compute_terminal_states(use=1)
        estimator_bwd.plot_terminal_states()

        estimator_bwd.compute_absorption_probabilities()
        estimator_bwd.plot_absorption_probabilities()

        estimator_bwd.compute_lineage_drivers(cluster_key="clusters",
                                              use_raw=False)

        _assert_has_all_keys(adata, Direction.BACKWARD)
Example #9
    def _restich_tmaps(
        self,
        tmaps: Mapping[Tuple[float, float], AnnData],
        last_time_point: LastTimePoint = LastTimePoint.DIAGONAL,
        conn_kwargs: Mapping[str, Any] = MappingProxyType({}),
        normalize: bool = True,
    ) -> AnnData:
        from cellrank.tl.kernels import ConnectivityKernel

        conn_kwargs = dict(conn_kwargs)
        conn_kwargs["copy"] = False
        _ = conn_kwargs.pop("key_added", None)
        density_normalize = conn_kwargs.pop("density_normalize", True)

        blocks = [[None] * (len(tmaps) + 1) for _ in range(len(tmaps) + 1)]
        nrows, ncols = 0, 0
        obs_names, obs = [], []

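        # each transport map couples time point i to i + 1, so it is placed on the
        # superdiagonal block (i, i + 1) of the block matrix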
        for i, tmap in enumerate(tmaps.values()):
            blocks[i][i + 1] = _normalize(tmap.X) if normalize else tmap.X
            nrows += tmap.n_obs
            ncols += tmap.n_vars
            obs_names.extend(tmap.obs_names)
            obs.append(tmap.obs)
        obs_names.extend(tmap.var_names)

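        # cells from the last time point have no outgoing transport map;
        # fill the bottom-right block according to `last_time_point`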
        n = self.adata.n_obs - nrows
        if last_time_point == LastTimePoint.DIAGONAL:
            blocks[-1][-1] = spdiags([1] * n, 0, n, n)
        elif last_time_point == LastTimePoint.UNIFORM:
            blocks[-1][-1] = np.ones((n, n)) / float(n)
        elif last_time_point == LastTimePoint.CONNECTIVITIES:
            adata_subset = self.adata[tmap.var_names].copy()
            sc.pp.neighbors(adata_subset, **conn_kwargs)
            blocks[-1][-1] = (
                ConnectivityKernel(adata_subset).compute_transition_matrix(
                    density_normalize).transition_matrix)
        else:
            raise NotImplementedError(
                f"Last time point mode `{last_time_point}` is not yet implemented."
            )

        # prevent the last block from disappearing
        n = blocks[0][1].shape[0]
        blocks[0][0] = spdiags([], 0, n, n)

        tmp = AnnData(bmat(blocks, format="csr"))
        tmp.obs_names = obs_names
        tmp.var_names = obs_names
        tmp = tmp[self.adata.obs_names, :][:, self.adata.obs_names]

        tmp.obs = pd.merge(
            tmp.obs,
            pd.concat(obs),
            left_index=True,
            right_index=True,
            how="left",
        )

        return tmp
Example #10
    def test_compute_schur_write_eigvals_similar_to_orig_eigdecomp(
            self, adata_large: AnnData):
        vk = VelocityKernel(adata_large).compute_transition_matrix(
            softmax_scale=4)
        ck = ConnectivityKernel(adata_large).compute_transition_matrix()
        final_kernel = 0.8 * vk + 0.2 * ck

        mc = cr.tl.estimators.GPCCA(final_kernel)
        mc.compute_eigendecomposition(k=10, only_evals=True)

        _check_eigdecomposition(mc)
        orig_ed = deepcopy(mc._get(P.EIG))

        mc._set(A.EIG, None)
        mc.compute_schur(n_components=10, method="krylov")

        _check_eigdecomposition(mc)
        schur_ed = mc._get(P.EIG)

        assert orig_ed.keys() == schur_ed.keys()
        assert orig_ed["eigengap"] == schur_ed["eigengap"]
        n = min(orig_ed["params"]["k"], schur_ed["params"]["k"])
        np.testing.assert_array_almost_equal(orig_ed["D"].real[:n],
                                             schur_ed["D"].real[:n])
        np.testing.assert_array_almost_equal(
            np.abs(orig_ed["D"].imag[:n]),
            np.abs(schur_ed["D"].imag[:n]))  # complex conj.
Example #11
def _create_cflare(*, backward: bool = False) -> Tuple[AnnData, CFLARE]:
    adata = _adata_medium.copy()

    sc.tl.paga(adata, groups="clusters")

    vk = VelocityKernel(adata, backward=backward).compute_transition_matrix(
        softmax_scale=4
    )
    ck = ConnectivityKernel(adata, backward=backward).compute_transition_matrix()
    final_kernel = 0.8 * vk + 0.2 * ck

    mc = CFLARE(final_kernel)

    mc.compute_partition()
    mc.compute_eigendecomposition()
    mc.compute_final_states(use=2)
    mc.compute_absorption_probabilities(use_petsc=False)
    mc.compute_lineage_drivers(cluster_key="clusters", use_raw=False)

    assert adata is mc.adata
    if backward:
        assert str(AbsProbKey.BACKWARD) in adata.obsm
    else:
        assert str(AbsProbKey.FORWARD) in adata.obsm
    np.testing.assert_array_almost_equal(mc.absorption_probabilities.sum(1), 1)

    return adata, mc
Example #12
def _create_gpcca(*, backward: bool = False) -> Tuple[AnnData, GPCCA]:
    adata = _adata_medium.copy()

    sc.tl.paga(adata, groups="clusters")

    vk = VelocityKernel(
        adata, backward=backward).compute_transition_matrix(softmax_scale=4)
    ck = ConnectivityKernel(adata,
                            backward=backward).compute_transition_matrix()
    final_kernel = 0.8 * vk + 0.2 * ck

    mc = GPCCA(final_kernel)

    mc.compute_partition()
    mc.compute_eigendecomposition()
    mc.compute_schur(method="krylov")
    mc.compute_macrostates(n_states=2)
    mc.set_terminal_states_from_macrostates()
    mc.compute_absorption_probabilities()
    mc.compute_lineage_drivers(cluster_key="clusters", use_raw=False)

    assert adata is mc.adata
    if backward:
        assert str(AbsProbKey.BACKWARD) in adata.obsm
    else:
        assert str(AbsProbKey.FORWARD) in adata.obsm
    np.testing.assert_allclose(mc.absorption_probabilities.X.sum(1),
                               1.0,
                               rtol=1e-6)

    return adata, mc
Example #13
def create_kernels(
    adata: AnnData,
    velocity_variances: Optional[str] = None,
    connectivity_variances: Optional[str] = None,
) -> Tuple[VelocityKernel, ConnectivityKernel]:
    vk = VelocityKernel(adata)
    vk._mat_scaler = adata.uns.get(
        velocity_variances, np.random.normal(size=(adata.n_obs, adata.n_obs))
    )

    ck = ConnectivityKernel(adata)
    ck._mat_scaler = adata.uns.get(
        connectivity_variances, np.random.normal(size=(adata.n_obs, adata.n_obs))
    )

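    # deterministic toy transition matrices: identity for the velocity kernel,
    # a row-stochastic band matrix for the connectivity kernel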
    vk._transition_matrix = csr_matrix(np.eye(adata.n_obs))
    ck._transition_matrix = np.eye(adata.n_obs, k=1) / 2 + np.eye(adata.n_obs) / 2
    ck._transition_matrix[-1, -1] = 1
    ck._transition_matrix = csr_matrix(ck._transition_matrix)

    np.testing.assert_allclose(
        np.sum(ck._transition_matrix.A, axis=1), 1
    )  # sanity check

    return vk, ck
Example #14
    def test_compute_terminal_states_no_eig(self, adata_large: AnnData):
        vk = VelocityKernel(adata_large).compute_transition_matrix(softmax_scale=4)
        ck = ConnectivityKernel(adata_large).compute_transition_matrix()
        terminal_kernel = 0.8 * vk + 0.2 * ck

        mc = cr.tl.estimators.CFLARE(terminal_kernel)
        with pytest.raises(RuntimeError):
            mc.compute_terminal_states(use=2)
Example #15
    def test_kernels_multiple_constant(self, adata: AnnData):
        vk = VelocityKernel(adata)
        ck = ConnectivityKernel(adata)
        v = 100 * vk + 42 * ck

        assert len(v.kernels) == 2
        assert vk in v.kernels
        assert ck in v.kernels
Example #16
    def test_kernels_multiple(self, adata: AnnData):
        vk = VelocityKernel(adata)
        ck = ConnectivityKernel(adata)
        v = vk + ck

        assert len(v.kernels) == 2
        assert vk in v.kernels
        assert ck in v.kernels
Example #17
    def test_copy_works(self, adata: AnnData):
        ck1 = ConnectivityKernel(adata)
        ck2 = ck1.copy()
        ck1.compute_transition_matrix()

        # the private attribute is checked because accessing the property would trigger the calculation
        assert ck1._transition_matrix is not None
        assert ck2._transition_matrix is None
Example #18
    def test_parent(self, adata: AnnData):
        vk = VelocityKernel(adata)
        ck = ConnectivityKernel(adata)
        k = vk + ck

        assert vk._parent._parent is k  # invisible constants
        assert ck._parent._parent is k
        assert k._parent is None
Example #19
    def test_not_none_transition_matrix_accessor(self, adata: AnnData):
        vk = VelocityKernel(adata)
        ck = ConnectivityKernel(adata)
        pk = PalantirKernel(adata, time_key="latent_time")

        assert vk.transition_matrix is not None
        assert ck.transition_matrix is not None
        assert pk.transition_matrix is not None
Example #20
    def test_copy_connectivity_kernel(self, adata: AnnData):
        ck1 = ConnectivityKernel(adata).compute_transition_matrix()
        ck2 = ck1.copy()

        np.testing.assert_array_equal(ck1.transition_matrix.A,
                                      ck2.transition_matrix.A)
        assert ck1.params == ck2.params
        assert ck1.backward == ck2.backward
Example #21
    def test_adaptive_kernel_complex(self, adata: AnnData):
        ck1 = ConnectivityKernel(adata).compute_transition_matrix()
        ck1._mat_scaler = np.random.normal(size=(adata.n_obs, adata.n_obs))

        ck2 = ConnectivityKernel(adata).compute_transition_matrix()
        ck2._mat_scaler = np.random.normal(size=(adata.n_obs, adata.n_obs))

        ck3 = ConnectivityKernel(adata).compute_transition_matrix()
        ck3._mat_scaler = np.random.normal(size=(adata.n_obs, adata.n_obs))

        k = 4 * ((3 * ck1) ^ (1 * ck2)) + 2 * ck3
        k.compute_transition_matrix()

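        # the outer constants 4 and 2 are normalized to 4 / 6 and 2 / 6,
        # the adaptive weights 3 and 1 to 3 / 4 and 1 / 4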
        assert k[0][0].transition_matrix == 4 / 6
        assert k[1][0].transition_matrix == 2 / 6
        assert k[0][1][0][0]._value == 3 / 4
        assert k[0][1][1][0]._value == 1 / 4
Example #22
    def test_inversion_propagation(self, adata: AnnData):
        c = ConnectivityKernel(adata, backward=False)
        v = VelocityKernel(adata, backward=False)
        k = ~(c + v)

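        # inverting the combined kernel propagates the backward direction to the underlying kernels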
        assert c.backward
        assert v.backward
        assert k.backward
Example #23
    def test_compute_macrostates_1_state_no_eig(self, adata_large: AnnData):
        vk = VelocityKernel(adata_large).compute_transition_matrix(
            softmax_scale=4)
        ck = ConnectivityKernel(adata_large).compute_transition_matrix()
        terminal_kernel = 0.8 * vk + 0.2 * ck

        mc = cr.tl.estimators.GPCCA(terminal_kernel)
        mc.compute_macrostates(n_states=1)
Example #24
    def test_compute_terminal_states_too_large_use(self, adata_large: AnnData):
        vk = VelocityKernel(adata_large).compute_transition_matrix(softmax_scale=4)
        ck = ConnectivityKernel(adata_large).compute_transition_matrix()
        terminal_kernel = 0.8 * vk + 0.2 * ck

        mc = cr.tl.estimators.CFLARE(terminal_kernel)
        mc.compute_eigendecomposition(k=2)
        with pytest.raises(ValueError):
            mc.compute_terminal_states(use=1000)
Example #25
    def test_not_none_transition_matrix_compute(self, adata: AnnData):
        vk = VelocityKernel(adata).compute_transition_matrix(softmax_scale=4)
        ck = ConnectivityKernel(adata).compute_transition_matrix()
        pk = PalantirKernel(
            adata, time_key="latent_time").compute_transition_matrix()

        assert vk.transition_matrix is not None
        assert ck.transition_matrix is not None
        assert pk.transition_matrix is not None
Example #26
    def test_rename_terminal_states_no_terminal_states(self, adata_large: AnnData):
        vk = VelocityKernel(adata_large).compute_transition_matrix(softmax_scale=4)
        ck = ConnectivityKernel(adata_large).compute_transition_matrix()
        terminal_kernel = 0.8 * vk + 0.2 * ck

        mc = cr.tl.estimators.CFLARE(terminal_kernel)
        mc.compute_eigendecomposition(k=5)
        with pytest.raises(RuntimeError):
            mc.rename_terminal_states({"foo": "bar"})
Example #27
    def test_compute_schur_invalid_eig_sort(self, adata_large: AnnData):
        vk = VelocityKernel(adata_large).compute_transition_matrix(
            softmax_scale=4)
        ck = ConnectivityKernel(adata_large).compute_transition_matrix()
        final_kernel = 0.8 * vk + 0.2 * ck

        mc = cr.tl.estimators.GPCCA(final_kernel)
        with pytest.raises(ValueError):
            mc.compute_schur(which="foobar", method="krylov")
Example #28
    def test_constant_normalize_2(self, adata: AnnData):
        k = (9 *
             VelocityKernel(adata).compute_transition_matrix(softmax_scale=4) +
             1 * ConnectivityKernel(adata).compute_transition_matrix())
        k.compute_transition_matrix()
        c1, c2 = _is_bin_mult(k[0]), _is_bin_mult(k[1])

        assert c1.transition_matrix == 9 / 10
        assert c2.transition_matrix == 1 / 10
Example #29
    def test_compute_metastable_states_no_eig(self, adata_large: AnnData):
        vk = VelocityKernel(adata_large).compute_transition_matrix(
            softmax_scale=4)
        ck = ConnectivityKernel(adata_large).compute_transition_matrix()
        final_kernel = 0.8 * vk + 0.2 * ck

        mc = cr.tl.estimators.GPCCA(final_kernel)
        with pytest.raises(RuntimeError):
            mc.compute_metastable_states(n_states=None)
Example #30
    def test_compute_schur_invalid_n_comps(self, adata_large: AnnData):
        vk = VelocityKernel(adata_large).compute_transition_matrix(
            softmax_scale=4)
        ck = ConnectivityKernel(adata_large).compute_transition_matrix()
        terminal_kernel = 0.8 * vk + 0.2 * ck

        mc = cr.tl.estimators.GPCCA(terminal_kernel)
        with pytest.raises(ValueError):
            mc.compute_schur(n_components=1, method="krylov")