def test_recover_timescale():
    """Sanity check: an MSM estimated from simulated trajectories should
    approximately recover the analytic slowest relaxation timescale of the
    discrete double-well dataset (within an absolute tolerance of 200 steps).
    """
    simulated = double_well_discrete().simulate_trajectories(n_trajectories=100, n_steps=50000)
    # Reference timescale taken from the dataset's analytic transition matrix.
    reference_ts = double_well_discrete().analytic_msm.timescales(1)[0]
    count_model = TransitionCountEstimator(1, 'sliding').fit(simulated).fetch_model()
    estimated_msm = MaximumLikelihoodMSM().fit(count_model.submodel_largest()).fetch_model()
    recovered_ts = estimated_msm.timescales(1)[0]
    np.testing.assert_(np.abs(reference_ts - recovered_ts) <= 200.)
def msm_double_well(lagtime=100, reversible=True, **kwargs) -> MaximumLikelihoodMSM:
    """Fit a maximum-likelihood MSM on the discrete double-well trajectory.

    :param lagtime: lag time used for transition counting
    :param reversible: whether to enforce detailed balance in the estimate
    :param kwargs: forwarded to the :class:`MaximumLikelihoodMSM` constructor
    :return: the fitted estimator
    """
    trajectory = datasets.double_well_discrete().dtraj
    # Count transitions with a sliding window and restrict to the largest
    # connected set of states before estimation.
    counts = TransitionCountEstimator(lagtime=lagtime, count_mode="sliding").fit(trajectory)
    largest_submodel = counts.fetch_model().submodel_largest()
    estimator = MaximumLikelihoodMSM(reversible=reversible, **kwargs)
    estimator.fit(largest_submodel)
    return estimator
def bmsm_double_well(lagtime=100, nsamples=100, reversible=True, constrain_to_coarse_pi=False, **kwargs) -> BayesianMSM:
    """Fit a Bayesian MSM on a two-state coarse graining of the discrete double well.

    :param lagtime: lag time used for transition counting
    :param nsamples: number of posterior samples to draw
    :param reversible: whether to enforce detailed balance
    :param constrain_to_coarse_pi: if True, constrain sampling to the
        coarse-grained stationary distribution of the analytic model
    :param kwargs: forwarded to the :class:`BayesianMSM` constructor
    :return: the fitted estimator
    """
    # Microstate observations and their analytic stationary distribution.
    micro_traj = datasets.double_well_discrete().dtraj
    micro_pi = datasets.double_well_discrete().analytic_msm.stationary_distribution
    # Aggregate the 100 microstates into two metastable macrostates,
    # split at microstate index 50.
    macro_pi = np.zeros(2)
    macro_pi[0] = micro_pi[0:50].sum()
    macro_pi[1] = micro_pi[50:].sum()
    assignment = np.zeros(100, dtype=int)
    assignment[50:] = 1
    macro_traj = assignment[micro_traj]
    pi_constraint = macro_pi if constrain_to_coarse_pi else None
    # Effective counts are appropriate for Bayesian sampling.
    counts = TransitionCountEstimator(lagtime=lagtime, count_mode="effective").fit(macro_traj)
    count_model = counts.fetch_model().submodel_largest(probability_constraint=pi_constraint)
    estimator = BayesianMSM(reversible=reversible, n_samples=nsamples,
                            stationary_distribution_constraint=pi_constraint, **kwargs)
    estimator.fit(count_model)
    return estimator
def __init__(self, init_dist_prior, tmat_prior):
    """Estimate a Bayesian HMM on the discrete double-well data using the
    supplied initial-distribution and transition-matrix priors, storing both
    the estimator and the sampled posterior on the instance.
    """
    observations = double_well_discrete().dtraj
    self.obs = observations
    self.n_states = 2       # number of hidden states
    self.n_samples = 100    # posterior samples to draw
    self.lag = 10           # lag time for the HMM
    self.est = BayesianHMM.default(
        dtrajs=observations,
        n_hidden_states=self.n_states,
        lagtime=self.lag,
        reversible=True,
        n_samples=self.n_samples,
        initial_distribution_prior=init_dist_prior,
        transition_matrix_prior=tmat_prior,
    )
    self.bhmm = self.est.fit(observations).fetch_model()
def setUpClass(cls):
    """One-time fixture: fit a Bayesian HMM with default priors on the
    discrete double-well observations and cache estimator and posterior
    on the test class.
    """
    observations = double_well_discrete().dtraj
    cls.n_states = 2       # number of hidden states
    cls.n_samples = 100    # posterior samples to draw
    cls.lag = 10           # lag time for the HMM
    cls.est = BayesianHMM.default(
        dtrajs=observations,
        n_hidden_states=cls.n_states,
        lagtime=cls.lag,
        reversible=True,
        n_samples=cls.n_samples,
    )
    cls.bhmm = cls.est.fit(observations).fetch_model()
    assert isinstance(cls.bhmm, BayesianHMMPosterior)
r""" Double-well discrete ==================== Showcase use of the :meth:`deeptime.data.double_well_discrete` dataset. """ import matplotlib.pyplot as plt import numpy as np from deeptime.data import double_well_discrete dwd = double_well_discrete() n_states = dwd.analytic_msm.n_states divides = [40, 45, 50, 55, 60] dtraj = dwd.dtraj_n(divides) divides = np.array([0] + divides + [n_states]) f, ax = plt.subplots(1, 1) f.suptitle( "Discrete double well with good\ndiscretization of transition region") ax.hist(divides[dtraj], divides, density=True, alpha=.5, color='C0', edgecolor='black', label='Empirical distribution') ax.bar(np.arange(n_states), dwd.analytic_msm.stationary_distribution, color='C1', alpha=.5,
def test_cache():
    """The double-well dataset's analytic MSM is built once and cached:
    repeated property access must yield the identical object, while a
    separately constructed model from the same matrix must not.
    """
    fresh_model = MarkovStateModel(double_well_discrete().transition_matrix)
    # A model built independently from the same transition matrix is a
    # distinct object from the cached analytic MSM ...
    assert double_well_discrete().analytic_msm is not fresh_model
    # ... whereas two accesses of the property return the same instance.
    assert double_well_discrete().analytic_msm is double_well_discrete().analytic_msm