Example #1
def test_fluxes_2():
    # depends on tpt.committors

    bmsm = BayesianMarkovStateModel(lag_time=1)
    assignments = np.random.randint(3, size=(10, 1000))
    bmsm.fit(assignments)

    # forward committors
    qplus = tpt.committors(0, 2, bmsm)

    ref_fluxes = np.zeros((3, 3))
    ref_net_fluxes = np.zeros((3, 3))
    for pop, tprob in zip(bmsm.all_populations_, bmsm.all_transmats_):
        for i in range(3):
            for j in range(3):
                if i != j:
                    # Eq. 2.24 in Metzner et al. Transition Path Theory.
                    # Multiscale Model. Simul. 2009, 7, 1192-1219.
                    ref_fluxes[i, j] += (pop[i] * tprob[i, j] *
                                         (1 - qplus[i]) * qplus[j])

    ref_fluxes /= 100.

    for i in range(3):
        for j in range(3):
            ref_net_fluxes[i, j] = np.max([0, ref_fluxes[i, j] -
                                          ref_fluxes[j, i]])

    fluxes = tpt.fluxes(0, 2, bmsm)
    net_fluxes = tpt.net_fluxes(0, 2, bmsm)

    npt.assert_array_almost_equal(ref_fluxes, fluxes, decimal=2)
    npt.assert_array_almost_equal(ref_net_fluxes, net_fluxes, decimal=2)
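The nested loop above implements Eq. 2.24 of Metzner et al. for the reactive flux between source state 0 and sink state 2, averaged over the 100 sampled transition matrices (hence the division by 100). Writing T for a sampled transition matrix, \pi for its stationary populations, and q^+ for the forward committor, and taking the backward committor as 1 - q^+ (exact for reversible chains), the quantity accumulated is

    f_{ij} = \pi_i \, (1 - q_i^+) \, T_{ij} \, q_j^+ \quad (i \neq j), \qquad f_{ii} = 0,

and the net flux compared against tpt.net_fluxes is

    f_{ij}^{\mathrm{net}} = \max(0,\; f_{ij} - f_{ji}).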
Example #2
def fit_bayes_msms(yaml_file):
    mdl_params = yaml_file["mdl_params"]
    msm__lag_time = mdl_params["msm__lag_time"]
    if "bayesmsm__n_samples" in mdl_params.keys():
        bayesmsm__n_samples = mdl_params["bayesmsm__n_samples"]
    else:
        bayesmsm__n_samples = 800
    if "bayesmsm__n_steps" in mdl_params.keys():
        bayesmsm__n_steps = mdl_params["bayesmsm__n_steps"]
    else:
        bayesmsm__n_steps = 1000000

    for protein in yaml_file["protein_list"]:
        with enter_protein_mdl_dir(yaml_file, protein):
            print(protein)
            assignments = verboseload("assignments.pkl")
            msm_mdl = BayesianMarkovStateModel(n_samples=bayesmsm__n_samples,
                                               n_steps=bayesmsm__n_steps,
                                               lag_time=msm__lag_time,
                                               ergodic_cutoff=1.0/msm__lag_time,
                                               verbose=True).fit(
                [assignments[i] for i in assignments.keys()])
            _ = msm_mdl.all_eigenvalues_
            verbosedump(msm_mdl, "bayesmsm_mdl.pkl")
            fixed_assignments = {}
            for i in assignments.keys():
                fixed_assignments[i] = msm_mdl.transform(
                    assignments[i], mode='fill')[0]
            verbosedump(fixed_assignments, 'fixed_assignments.pkl')
    return
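For orientation, here is a minimal sketch of the configuration dict that fit_bayes_msms consumes. Only the keys the function actually reads are shown; the values and the protein name are illustrative placeholders, and in the real workflow the dict comes from a parsed project YAML file.

# Minimal sketch of the config dict consumed by fit_bayes_msms above.
# Keys mirror what the function reads; the values and the protein name
# are placeholders.
yaml_file = {
    "mdl_params": {
        "msm__lag_time": 10,              # lag time in frames
        "bayesmsm__n_samples": 800,       # optional; defaults to 800
        "bayesmsm__n_steps": 1000000,     # optional; defaults to 1000000
    },
    "protein_list": ["protein_a"],        # placeholder protein name
}
# Each protein's model directory (entered via enter_protein_mdl_dir) is
# expected to contain an "assignments.pkl" file.
# fit_bayes_msms(yaml_file)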
Example #3
def test_ergodic_cutoff():
    assert (MarkovStateModel(lag_time=10).ergodic_cutoff ==
            BayesianMarkovStateModel(lag_time=10).ergodic_cutoff)
    assert (MarkovStateModel(lag_time=10)._parse_ergodic_cutoff() ==
            BayesianMarkovStateModel(lag_time=10)._parse_ergodic_cutoff())
    for cut_off in [0.01, 'on', 'off']:
        assert (MarkovStateModel(ergodic_cutoff=cut_off).ergodic_cutoff ==
                BayesianMarkovStateModel(ergodic_cutoff=cut_off).ergodic_cutoff)
Example #4
def test_committors_2():
    bmsm = BayesianMarkovStateModel(lag_time=1)
    assignments = np.random.randint(3, size=(10, 1000))
    bmsm.fit(assignments)

    committors = tpt.committors([0], [2], bmsm)

    ref = 0
    for tprob in bmsm.all_transmats_:
        ref += np.power(tprob[1, 1], np.arange(1000)).sum() * tprob[1, 2]
    ref = np.array([0, ref / 100., 1])

    npt.assert_array_almost_equal(ref, committors, decimal=2)
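The reference committor above is built directly from each sampled transition matrix: starting from the intermediate state 1, the probability of reaching the sink state 2 before the source state 0 is a geometric series over self-transitions in state 1, truncated at 1000 terms in the code and averaged over the 100 posterior samples:

    q_1^+ = \sum_{n \ge 0} T_{11}^{\,n} \, T_{12} = \frac{T_{12}}{1 - T_{11}}, \qquad q_0^+ = 0, \quad q_2^+ = 1.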
Example #5
def test_3():
    trajectory = [
        0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 0, 0,
        0, 2, 2, 2, 0, 0, 0
    ]
    msm1 = BayesianMarkovStateModel(sampler='metzner',
                                    n_steps=1,
                                    n_samples=100,
                                    n_chains=1,
                                    random_state=0)
    msm1.fit([trajectory])
    msm2 = BayesianMarkovStateModel(sampler='metzner_py',
                                    n_steps=1,
                                    n_samples=100,
                                    n_chains=1,
                                    random_state=0)
    msm2.fit([trajectory])

    np.testing.assert_array_almost_equal(msm1.all_transmats_,
                                         msm2.all_transmats_)

    assert msm1.all_timescales_.shape == (100, 2)
    assert msm1.all_eigenvalues_.shape == (100, 3)
    assert msm1.all_left_eigenvectors_.shape == (100, 3, 3)
    assert msm1.all_right_eigenvectors_.shape == (100, 3, 3)
    assert msm1.all_populations_.shape == (100, 3)
    np.testing.assert_array_almost_equal(msm1.all_populations_.sum(axis=1),
                                         np.ones(100))
Example #6
def test_5():
    trjs = DoubleWell(random_state=0).get_cached().trajectories
    clusterer = NDGrid(n_bins_per_feature=5)
    mle_msm = MarkovStateModel(lag_time=100, verbose=False)
    b_msm = BayesianMarkovStateModel(lag_time=100,
                                     n_samples=1000,
                                     n_chains=8,
                                     n_steps=1000,
                                     random_state=0)

    states = clusterer.fit_transform(trjs)
    b_msm.fit(states)
    mle_msm.fit(states)

    # this is a pretty silly test. it checks that the mean transition
    # matrix is not so dissimilar from the MLE transition matrix.
    # This shouldn't necessarily be the case anyway -- the likelihood is
    # not "symmetric". And the cutoff chosen is just a heuristic.
    assert np.linalg.norm(
        b_msm.all_transmats_.mean(axis=0) - mle_msm.transmat_) < 1e-2
Example #7
def test_4():
    trajectory = [
        0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 0, 0,
        0, 2, 2, 2, 0, 0, 0
    ]
    msm1 = BayesianMarkovStateModel(n_steps=3,
                                    n_samples=10,
                                    n_chains=1,
                                    random_state=0).fit([trajectory])
    assert msm1.all_transmats_.shape[0] == 10

    msm2 = BayesianMarkovStateModel(n_steps=4,
                                    n_samples=10,
                                    n_chains=3,
                                    random_state=0).fit([trajectory])
    assert msm2.all_transmats_.shape[0] == 10
Example #8
def test_cond_committors_2():
    # depends on tpt.committors

    bmsm = BayesianMarkovStateModel(lag_time=1)
    assignments = np.random.randint(4, size=(10, 1000))
    bmsm.fit(assignments)

    for_committors = tpt.committors(0, 3, bmsm)
    cond_committors = tpt.conditional_committors(0, 3, 2, bmsm)

    ref = 0
    for tprob in bmsm.all_transmats_:
        ref += (for_committors[1] -
                np.power(tprob[1, 1], np.arange(5000)).sum() *
                tprob[1, 3])
    ref = [0, ref / 100., for_committors[2], 0]

    npt.assert_array_almost_equal(ref, cond_committors, decimal=2)
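The reference here uses the same geometric-series idea: among trajectories that commit from state 1 to the sink state 3, the ones that never visit the intermediate state 2 are exactly those that jump 1 -> 3 after some number of self-transitions in state 1. Subtracting that contribution from the forward committor (truncated at 5000 terms in the code and averaged over the posterior samples) leaves the conditional committor through state 2:

    \tilde{q}_1 = q_1^+ - \sum_{n \ge 0} T_{11}^{\,n} \, T_{13} = q_1^+ - \frac{T_{13}}{1 - T_{11}}.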
Example #9
def test_5():
    trjs = DoubleWell(random_state=0).get_cached().trajectories
    clusterer = NDGrid(n_bins_per_feature=5)
    mle_msm = MarkovStateModel(lag_time=100, verbose=False)
    b_msm = BayesianMarkovStateModel(lag_time=100,
                                     n_samples=1000,
                                     n_chains=8,
                                     n_steps=1000,
                                     random_state=0)

    states = clusterer.fit_transform(trjs)
    b_msm.fit(states)
    mle_msm.fit(states)

    # this is a pretty silly test. it checks that the mean transition
    # matrix is not so dissimilar from the MLE transition matrix.
    # This shouldn't necessarily be the case anyway -- the likelihood is
    # not "symmetric". And the cutoff chosen is just a heuristic.
    assert np.linalg.norm(
        b_msm.all_transmats_.mean(axis=0) - mle_msm.transmat_) < 1e-2
Example #10
def test_3():
    trajectory = [
        0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 0, 0,
        0, 2, 2, 2, 0, 0, 0
    ]
    msm1 = BayesianMarkovStateModel(sampler="metzner",
                                    n_steps=1,
                                    n_samples=100,
                                    n_chains=1,
                                    random_state=0)
    msm1.fit([trajectory])
    msm2 = BayesianMarkovStateModel(sampler="metzner_py",
                                    n_steps=1,
                                    n_samples=100,
                                    n_chains=1,
                                    random_state=0)
    msm2.fit([trajectory])

    np.testing.assert_array_almost_equal(msm1.all_transmats_,
                                         msm2.all_transmats_)

    assert msm1.all_timescales_.shape == (100, 2)
    assert msm1.all_eigenvalues_.shape == (100, 3)
    assert msm1.all_left_eigenvectors_.shape == (100, 3, 3)
    assert msm1.all_right_eigenvectors_.shape == (100, 3, 3)
    assert msm1.all_populations_.shape == (100, 3)
    np.testing.assert_array_almost_equal(msm1.all_populations_.sum(axis=1),
                                         np.ones(100))
Example #11
from nose.plugins.skip import SkipTest
import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase

from ..plots import plot_tpaths
from . import PlotTestCase

rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)

msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)


class TestTPTPlot(PlotTestCase):
    """Test the function(s) that visualize TPTs."""
    def test_plot_tpaths_msm(self):
        ax = plot_tpaths(msm, 0, 9)

        assert isinstance(ax, SubplotBase)

    @SkipTest
    def test_plot_tpaths_bmsm(self):
        ax = plot_tpaths(bmsm, 0, 9)

        assert isinstance(ax, SubplotBase)
Example #12
import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid

from ..plots import (plot_pop_resids, plot_msm_network, plot_timescales,
                     plot_implied_timescales)
from . import PlotTestCase

rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)


class TestMSMPlot(PlotTestCase):
    """Test the function(s) that visualize MSMs."""

    def test_plot_pop_resids(self):
        ax = plot_pop_resids(msm)

        assert isinstance(ax, JointGrid)

    def test_plot_msm_network(self):
        ax = plot_msm_network(msm)

        assert isinstance(ax, SubplotBase)

    def test_plot_timescales_msm(self):