def test_muller_potential_stats():
    # Set constants
    n_seq = 1
    num_trajs = 1
    T = 2500
    num_hotstart = 0

    # Generate data
    warnings.filterwarnings("ignore", category=DeprecationWarning)
    muller = MullerModel()
    data, trajectory, start = \
        muller.generate_dataset(n_seq, num_trajs, T)
    n_features = muller.x_dim
    n_components = muller.K

    # Fit reference model and initial MSLDS model
    refmodel = GaussianHMM(n_components=n_components,
                           covariance_type='full').fit(data)
    model = MetastableSwitchingLDS(n_components, n_features,
                                   n_hotstart=num_hotstart)
    model.inferrer._sequences = data
    model.means_ = refmodel.means_
    model.covars_ = refmodel.covars_
    model.transmat_ = refmodel.transmat_
    model.populations_ = refmodel.startprob_
    As = []
    for i in range(n_components):
        As.append(np.zeros((n_features, n_features)))
    model.As_ = As
    model.Qs_ = refmodel.covars_
    model.bs_ = refmodel.means_
    iteration = 0

    # Remove this step once hot_start is factored out
    logprob, stats = model.inferrer.do_estep()
    rlogprob, rstats = reference_estep(refmodel, data)

    yield lambda: np.testing.assert_array_almost_equal(
        stats['post'], rstats['post'], decimal=3)
    yield lambda: np.testing.assert_array_almost_equal(
        stats['post[1:]'], rstats['post[1:]'], decimal=3)
    yield lambda: np.testing.assert_array_almost_equal(
        stats['post[:-1]'], rstats['post[:-1]'], decimal=3)
    yield lambda: np.testing.assert_array_almost_equal(
        stats['obs'], rstats['obs'], decimal=3)
    yield lambda: np.testing.assert_array_almost_equal(
        stats['obs[1:]'], rstats['obs[1:]'], decimal=3)
    yield lambda: np.testing.assert_array_almost_equal(
        stats['obs[:-1]'], rstats['obs[:-1]'], decimal=3)
    yield lambda: np.testing.assert_array_almost_equal(
        stats['obs*obs.T'], rstats['obs*obs.T'], decimal=3)
    yield lambda: np.testing.assert_array_almost_equal(
        stats['obs*obs[t-1].T'], rstats['obs*obs[t-1].T'], decimal=3)
    yield lambda: np.testing.assert_array_almost_equal(
        stats['obs[1:]*obs[1:].T'], rstats['obs[1:]*obs[1:].T'], decimal=3)
    yield lambda: np.testing.assert_array_almost_equal(
        stats['obs[:-1]*obs[:-1].T'], rstats['obs[:-1]*obs[:-1].T'], decimal=3)
    yield lambda: np.testing.assert_array_almost_equal(
        stats['trans'], rstats['trans'], decimal=1)
def test_muller_potential_mstep():
    import pdb, traceback, sys
    try:
        # Set constants
        n_seq = 1
        num_trajs = 1
        T = 2500

        # Generate data
        warnings.filterwarnings("ignore", category=DeprecationWarning)
        muller = MullerModel()
        data, trajectory, start = \
            muller.generate_dataset(n_seq, num_trajs, T)
        n_features = muller.x_dim
        n_components = muller.K

        # Fit reference model and initial MSLDS model
        refmodel = GaussianHMM(n_components=n_components,
                               covariance_type='full').fit(data)

        # Obtain sufficient statistics from refmodel
        rlogprob, rstats = reference_estep(refmodel, data)
        means = refmodel.means_
        covars = refmodel.covars_
        transmat = refmodel.transmat_
        populations = refmodel.startprob_
        As = []
        for i in range(n_components):
            As.append(np.zeros((n_features, n_features)))
        Qs = refmodel.covars_
        bs = refmodel.means_

        # Test AQB solver for MSLDS
        solver = MetastableSwitchingLDSSolver(n_components, n_features)
        solver.do_mstep(As, Qs, bs, means, covars, rstats)
    except:
        type, value, tb = sys.exc_info()
        traceback.print_exc()
        pdb.post_mortem(tb)
def test_muller_potential():
    import pdb, traceback, sys
    try:
        # Set constants
        n_hotstart = 3
        n_em_iter = 3
        n_experiments = 1
        n_seq = 1
        num_trajs = 1
        T = 2500
        sim_T = 2500
        gamma = 200.

        # Generate data
        warnings.filterwarnings("ignore", category=DeprecationWarning)
        muller = MullerModel()
        data, trajectory, start = \
            muller.generate_dataset(n_seq, num_trajs, T)
        n_features = muller.x_dim
        n_components = muller.K

        # Train MSLDS
        model = MetastableSwitchingLDS(n_components, n_features,
                                       n_hotstart=n_hotstart,
                                       n_em_iter=n_em_iter,
                                       n_experiments=n_experiments)
        model.fit(data, gamma=gamma)
        mslds_score = model.score(data)
        print("MSLDS Log-Likelihood = %f" % mslds_score)

        # Fit Gaussian HMM for comparison
        g = GaussianFusionHMM(n_components, n_features)
        g.fit(data)
        hmm_score = g.score(data)
        print("HMM Log-Likelihood = %f" % hmm_score)

        # Clear Display
        plt.cla()
        plt.plot(trajectory[start:, 0], trajectory[start:, 1], color='k')
        plt.scatter(model.means_[:, 0], model.means_[:, 1],
                    color='r', zorder=10)
        plt.scatter(data[0][:, 0], data[0][:, 1],
                    edgecolor='none', facecolor='k', zorder=1)

        Delta = 0.5
        minx = min(data[0][:, 0])
        maxx = max(data[0][:, 0])
        miny = min(data[0][:, 1])
        maxy = max(data[0][:, 1])

        sim_xs, sim_Ss = model.sample(sim_T, init_state=0,
                                      init_obs=model.means_[0])

        minx = min(min(sim_xs[:, 0]), minx) - Delta
        maxx = max(max(sim_xs[:, 0]), maxx) + Delta
        miny = min(min(sim_xs[:, 1]), miny) - Delta
        maxy = max(max(sim_xs[:, 1]), maxy) + Delta
        plt.scatter(sim_xs[:, 0], sim_xs[:, 1],
                    edgecolor='none', zorder=5, facecolor='g')
        plt.plot(sim_xs[:, 0], sim_xs[:, 1], zorder=5, color='g')

        MullerForce.plot(ax=plt.gca(), minx=minx, maxx=maxx,
                         miny=miny, maxy=maxy)
        plt.show()
    except:
        type, value, tb = sys.exc_info()
        traceback.print_exc()
        pdb.post_mortem(tb)
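

# A minimal direct-run sketch, not part of the original test suite: under a
# nose-style runner this block is unnecessary. It only illustrates that
# test_muller_potential_stats() is a generator test, so its yielded assertion
# callables must be consumed explicitly when this module is run as a script.
if __name__ == "__main__":
    for check in test_muller_potential_stats():
        check()
    test_muller_potential_mstep()
    test_muller_potential()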