Example #1
import numpy as np
import numpy.random as npr
from ssm import HMM


def test_hmm_likelihood(T=500, K=5, D=2):
    # Create a true HMM
    A = npr.rand(K, K)
    A /= A.sum(axis=1, keepdims=True)
    A = 0.75 * np.eye(K) + 0.25 * A
    C = npr.randn(K, D)
    sigma = 0.01

    # Sample from the true HMM
    z = np.zeros(T, dtype=int)
    y = np.zeros((T, D))
    for t in range(T):
        if t > 0:
            z[t] = np.random.choice(K, p=A[z[t - 1]])
        y[t] = C[z[t]] + np.sqrt(sigma) * npr.randn(D)

    # Compare to pyhsmm answer
    from pyhsmm.models import HMM as OldHMM
    from pyhsmm.basic.distributions import Gaussian
    hmm = OldHMM(
        [Gaussian(mu=C[k], sigma=sigma * np.eye(D)) for k in range(K)],
        trans_matrix=A,
        init_state_distn="uniform")
    true_lkhd = hmm.log_likelihood(y)

    # Make an HMM with these parameters
    hmm = HMM(K, D, observations="gaussian")
    hmm.transitions.log_Ps = np.log(A)
    hmm.observations.mus = C
    hmm.observations.inv_sigmas = np.log(sigma) * np.ones((K, D))
    test_lkhd = hmm.log_probability(y)

    assert np.allclose(true_lkhd, test_lkhd)
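
# For reference (not part of the test above): both libraries compute the same
# quantity, the marginal log-likelihood from the forward algorithm. A minimal
# pure numpy/scipy sketch, assuming a uniform initial state distribution as in
# the test; `forward_loglik` is not part of either library.
from scipy.special import logsumexp
from scipy.stats import multivariate_normal

def forward_loglik(y, A, C, sigma):
    """Marginal log-likelihood of y under a K-state Gaussian-emission HMM."""
    T, D = y.shape
    K = A.shape[0]
    # Per-state Gaussian log densities, shape (T, K)
    log_obs = np.column_stack(
        [multivariate_normal.logpdf(y, C[k], sigma * np.eye(D))
         for k in range(K)])
    # Forward recursion in log space with a uniform initial distribution
    alpha = -np.log(K) + log_obs[0]
    for t in range(1, T):
        alpha = logsumexp(alpha[:, None] + np.log(A), axis=0) + log_obs[t]
    return logsumexp(alpha)

# e.g. inside test_hmm_likelihood one could additionally check:
#     assert np.isclose(forward_loglik(y, A, C, sigma), test_lkhd)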
Example #2
import numpy as np
import numpy.random as npr
import ssm


def test_viterbi(T=1000, K=20, D=2):
    # Create a true HMM
    A = npr.rand(K, K)
    A /= A.sum(axis=1, keepdims=True)
    A = 0.75 * np.eye(K) + 0.25 * A
    C = npr.randn(K, D)
    sigma = 0.01

    # Sample from the true HMM
    z = np.zeros(T, dtype=int)
    y = np.zeros((T, D))
    for t in range(T):
        if t > 0:
            z[t] = np.random.choice(K, p=A[z[t-1]])
        y[t] = C[z[t]] + np.sqrt(sigma) * npr.randn(D)

    # Compare to pyhsmm answer
    from pyhsmm.models import HMM as OldHMM
    from pyhsmm.basic.distributions import Gaussian
    oldhmm = OldHMM([Gaussian(mu=C[k], sigma=sigma * np.eye(D)) for k in range(K)],
                  trans_matrix=A,
                  init_state_distn="uniform")
    oldhmm.add_data(y)
    states = oldhmm.states_list.pop()
    states.Viterbi()
    z_star = states.stateseq

    # Make an HMM with these parameters
    hmm = ssm.HMM(K, D, observations="diagonal_gaussian")
    hmm.transitions.log_Ps = np.log(A)
    hmm.observations.mus = C
    hmm.observations.sigmasq = sigma * np.ones((K, D))
    z_star2 = hmm.most_likely_states(y)

    assert np.allclose(z_star, z_star2)
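
    # Note (illustration): the two paths match exactly here only because both
    # models were constructed with identical parameters. For models that have
    # been fit to data, the state labels are arbitrary, and ssm's helper can
    # align them before comparing paths, e.g. (a sketch):
    #
    #     from ssm.util import find_permutation
    #     hmm.permute(find_permutation(z_star, hmm.most_likely_states(y)))
    #     z_star2 = hmm.most_likely_states(y)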
Example #3
from time import time

import numpy as np
import numpy.random as npr
import ssm


def test_hmm_likelihood_perf(T=10000, K=50, D=20):
    # Create a true HMM
    A = npr.rand(K, K)
    A /= A.sum(axis=1, keepdims=True)
    A = 0.75 * np.eye(K) + 0.25 * A
    C = npr.randn(K, D)
    sigma = 0.01

    # Sample from the true HMM
    z = np.zeros(T, dtype=int)
    y = np.zeros((T, D))
    for t in range(T):
        if t > 0:
            z[t] = np.random.choice(K, p=A[z[t - 1]])
        y[t] = C[z[t]] + np.sqrt(sigma) * npr.randn(D)

    # Compare to pyhsmm answer
    from pyhsmm.models import HMM as OldHMM
    from pybasicbayes.distributions import Gaussian
    oldhmm = OldHMM(
        [Gaussian(mu=C[k], sigma=sigma * np.eye(D)) for k in range(K)],
        trans_matrix=A,
        init_state_distn="uniform")

    states = oldhmm.add_data(y)
    tic = time()
    true_lkhd = states.log_likelihood()
    pyhsmm_dt = time() - tic
    print("PyHSMM: ", pyhsmm_dt, "sec. Val: ", true_lkhd)

    # Make an HMM with these parameters
    hmm = ssm.HMM(K, D, observations="gaussian")
    hmm.transitions.log_Ps = np.log(A)
    hmm.observations.mus = C
    # Set each state's full covariance via its Cholesky factor
    hmm.observations._sqrt_Sigmas = np.sqrt(sigma) * np.array(
        [np.eye(D) for k in range(K)])

    tic = time()
    test_lkhd = hmm.log_probability(y)
    ssm_dt = time() - tic
    print("SSM HMM: ", ssm_dt, "sec. Val: ", test_lkhd)

    # Time an ARHMM (same K and D, default AR parameters) on the same data
    arhmm = ssm.HMM(K, D, observations="ar")
    tic = time()
    arhmm.log_probability(y)
    arhmm_dt = time() - tic
    print("SSM ARHMM: ", arhmm_dt, "sec.")

    # Time the ARHMM's expected-states (E-step) computation as well
    arhmm = ssm.HMM(K, D, observations="ar")
    tic = time()
    arhmm.expected_states(y)
    arhmm_dt = time() - tic
    print("SSM ARHMM Expectations: ", arhmm_dt, "sec.")
Example #4
import numpy as np
import numpy.random as npr
from ssm import HMM


def test_expectations(T=1000, K=20, D=2):
    # Create a true HMM
    A = npr.rand(K, K)
    A /= A.sum(axis=1, keepdims=True)
    A = 0.75 * np.eye(K) + 0.25 * A
    C = npr.randn(K, D)
    sigma = 0.01

    # Sample from the true HMM
    z = np.zeros(T, dtype=int)
    y = np.zeros((T, D))
    for t in range(T):
        if t > 0:
            z[t] = np.random.choice(K, p=A[z[t - 1]])
        y[t] = C[z[t]] + np.sqrt(sigma) * npr.randn(D)

    # Compare to pyhsmm answer
    from pyhsmm.models import HMM as OldHMM
    from pyhsmm.basic.distributions import Gaussian
    hmm = OldHMM(
        [Gaussian(mu=C[k], sigma=sigma * np.eye(D)) for k in range(K)],
        trans_matrix=A,
        init_state_distn="uniform")
    hmm.add_data(y)
    states = hmm.states_list.pop()
    states.E_step()
    true_Ez = states.expected_states
    true_E_trans = states.expected_transcounts

    # Make an HMM with these parameters
    hmm = HMM(K, D, observations="gaussian")
    hmm.transitions.log_Ps = np.log(A)
    hmm.observations.mus = C
    hmm.observations.inv_sigmas = np.log(sigma) * np.ones((K, D))
    test_Ez, test_Ezzp1, _ = hmm.expected_states(y)
    test_E_trans = test_Ezzp1.sum(0)

    print(true_E_trans.round(3))
    print(test_E_trans.round(3))

    assert np.allclose(true_Ez, test_Ez)
    assert np.allclose(true_E_trans, test_E_trans)
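
    # Added for illustration: structural sanity checks on the expectations.
    # The smoothed state marginals sum to one at every time step, and the
    # expected transition counts sum to T - 1 overall.
    assert np.allclose(test_Ez.sum(axis=1), 1.0)
    assert np.isclose(test_E_trans.sum(), T - 1)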
Example #5
import numpy as np
import pyhsmm
from pyhsmm.models import HMM

# NOTE: `inference` is assumed to be defined elsewhere in the surrounding
# project; it is not shown in this excerpt.


def run_HMM(time_series, **kwargs):
    # Directly use knowledge of the number of modes
    K = kwargs['num_gt_labels']
    D_obs = kwargs['data_dim']

    # Normal-inverse-Wishart hyperparameters for the Gaussian observation prior
    obs_hypparams = {
        'mu_0': np.zeros(D_obs),
        'sigma_0': np.eye(D_obs),
        'kappa_0': 0.10,
        'nu_0': D_obs + 2
    }

    obs_distns = [
        pyhsmm.distributions.Gaussian(**obs_hypparams) for _ in range(K)
    ]

    hmm = HMM(alpha=kwargs['alpha'],
              init_state_concentration=kwargs['init_state_concentration'],
              obs_distns=obs_distns)

    return inference(time_series, hmm, **kwargs)
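
# A hypothetical call (the keyword names follow what run_HMM reads above; the
# values are illustrative, and the `inference` helper it delegates to lives
# elsewhere in the project):
#
#     results = run_HMM(time_series,
#                       num_gt_labels=5, data_dim=3,
#                       alpha=6., init_state_concentration=10.)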
Example #6
# NOTE: this excerpt begins mid-statement; the opening of the model
# constructor (the `truemodel = ...(` line) is missing from the source.
        init_state_concentration=10.,
        alpha=6., gamma=6.,
        obs_distns=GMMs,
        dur_distns=true_dur_distns)

data, truelabels = truemodel.generate(1000)

#################
#  remove data  #
#################

# using an HMM of course

deletion_hmm = HMM(
        init_state_concentration=10.,
        alpha=6., gamma=6.,
        obs_distns=GMMs[:2],  # placeholders, not really used
        )

_, todelete = deletion_hmm.generate(data.shape[0])

data[todelete == 0] = np.nan  # mark the selected frames as missing

#####################################
#  set up FrozenMixture components  #
#####################################

# list of all Gaussians
component_library = [c for m in GMMs for c in m.components]
library_size = len(component_library)
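
# Added for illustration (assuming 2-D observation arrays): count how many
# frames the HMM-based mask marked as missing.
n_missing = int(np.isnan(data).any(axis=1).sum())
print("marked %d of %d frames as missing" % (n_missing, data.shape[0]))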
Example #7
# NOTE: as in Example #6, this excerpt begins mid-statement; the opening of
# the model constructor is missing from the source.
                                 alpha=6.,
                                 gamma=6.,
                                 obs_distns=GMMs,
                                 dur_distns=true_dur_distns)

data, truelabels = truemodel.generate(1000)

#################
#  remove data  #
#################

# using an HMM of course

deletion_hmm = HMM(
    init_state_concentration=10.,
    alpha=6.,
    gamma=6.,
    obs_distns=GMMs[:2],  # placeholders, not really used
)

_, todelete = deletion_hmm.generate(data.shape[0])

data[todelete == 0] = np.nan  # mark the selected frames as missing

#####################################
#  set up FrozenMixture components  #
#####################################

# list of all Gaussians
component_library = [c for m in GMMs for c in m.components]
library_size = len(component_library)
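
# Added for illustration: the flattened library's size is simply the total
# number of mixture components across all the GMMs.
assert library_size == sum(len(m.components) for m in GMMs)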