Code Example #1
def _fit_once():
    # Fit a single ARHMM to the data.
    # Kmax, rank, N, Xp, and num_iters are free variables from the enclosing scope.
    if rank is not None:
        arhmm = ssm.HMM(Kmax, rank, observations="ar")
    else:
        arhmm = ssm.HMM(Kmax, N, observations="ar")
    lls = arhmm.fit(Xp, num_iters=num_iters)
    return arhmm, lls
Code Example #2
def test_hmm_likelihood_perf(T=10000, K=50, D=20):
    # Create a true HMM
    A = npr.rand(K, K)
    A /= A.sum(axis=1, keepdims=True)
    A = 0.75 * np.eye(K) + 0.25 * A
    C = npr.randn(K, D)
    sigma = 0.01

    # Sample from the true HMM
    z = np.zeros(T, dtype=int)
    y = np.zeros((T, D))
    for t in range(T):
        if t > 0:
            z[t] = np.random.choice(K, p=A[z[t - 1]])
        y[t] = C[z[t]] + np.sqrt(sigma) * npr.randn(D)

    # Compare to pyhsmm answer
    from pyhsmm.models import HMM as OldHMM
    from pybasicbayes.distributions import Gaussian
    oldhmm = OldHMM(
        [Gaussian(mu=C[k], sigma=sigma * np.eye(D)) for k in range(K)],
        trans_matrix=A,
        init_state_distn="uniform")

    states = oldhmm.add_data(y)
    tic = time()
    true_lkhd = states.log_likelihood()
    pyhsmm_dt = time() - tic
    print("PyHSMM: ", pyhsmm_dt, "sec. Val: ", true_lkhd)

    # Make an HMM with these parameters
    hmm = ssm.HMM(K, D, observations="gaussian")
    hmm.transitions.log_Ps = np.log(A)
    hmm.observations.mus = C
    hmm.observations._sqrt_Sigmas = np.sqrt(sigma) * np.array(
        [np.eye(D) for k in range(K)])

    tic = time()
    test_lkhd = hmm.log_probability(y)
    ssm_dt = time() - tic
    print("SSM HMM: ", ssm_dt, "sec. Val: ", test_lkhd)

    # Time log_probability under an ARHMM with default parameters
    arhmm = ssm.HMM(K, D, observations="ar")
    tic = time()
    arhmm.log_probability(y)
    arhmm_dt = time() - tic
    print("SSM ARHMM: ", arhmm_dt, "sec.")

    # Time the expected-states (E-step) computation under an ARHMM with default parameters
    arhmm = ssm.HMM(K, D, observations="ar")
    tic = time()
    arhmm.expected_states(y)
    arhmm_dt = time() - tic
    print("SSM ARHMM Expectations: ", arhmm_dt, "sec.")
Code Example #3
File: test_basics.py Project: zhaoyuqi788/ssm
def test_viterbi(T=1000, K=20, D=2):
    # Create a true HMM
    A = npr.rand(K, K)
    A /= A.sum(axis=1, keepdims=True)
    A = 0.75 * np.eye(K) + 0.25 * A
    C = npr.randn(K, D)
    sigma = 0.01

    # Sample from the true HMM
    z = np.zeros(T, dtype=int)
    y = np.zeros((T, D))
    for t in range(T):
        if t > 0:
            z[t] = np.random.choice(K, p=A[z[t-1]])
        y[t] = C[z[t]] + np.sqrt(sigma) * npr.randn(D)

    # Compare to pyhsmm answer
    from pyhsmm.models import HMM as OldHMM
    from pyhsmm.basic.distributions import Gaussian
    oldhmm = OldHMM([Gaussian(mu=C[k], sigma=sigma * np.eye(D)) for k in range(K)],
                  trans_matrix=A,
                  init_state_distn="uniform")
    oldhmm.add_data(y)
    states = oldhmm.states_list.pop()
    states.Viterbi()
    z_star = states.stateseq

    # Make an HMM with these parameters
    hmm = ssm.HMM(K, D, observations="diagonal_gaussian")
    hmm.transitions.log_Ps = np.log(A)
    hmm.observations.mus = C
    hmm.observations.sigmasq = sigma * np.ones((K, D))
    z_star2 = hmm.most_likely_states(y)

    assert np.allclose(z_star, z_star2)
Code Example #4
def test_hmm_likelihood(T=1000, K=5, D=2):
    # Create a true HMM
    A = npr.rand(K, K)
    A /= A.sum(axis=1, keepdims=True)
    A = 0.75 * np.eye(K) + 0.25 * A
    C = npr.randn(K, D)
    sigma = 0.01

    # Sample from the true HMM
    z = np.zeros(T, dtype=int)
    y = np.zeros((T, D))
    for t in range(T):
        if t > 0:
            z[t] = np.random.choice(K, p=A[z[t - 1]])
        y[t] = C[z[t]] + np.sqrt(sigma) * npr.randn(D)

    # Compare to pyhsmm answer
    from pyhsmm.models import HMM as OldHMM
    from pybasicbayes.distributions import Gaussian
    oldhmm = OldHMM(
        [Gaussian(mu=C[k], sigma=sigma * np.eye(D)) for k in range(K)],
        trans_matrix=A,
        init_state_distn="uniform")
    true_lkhd = oldhmm.log_likelihood(y)

    # Make an HMM with these parameters
    hmm = ssm.HMM(K, D, observations="diagonal_gaussian")
    hmm.transitions.log_Ps = np.log(A)
    hmm.observations.mus = C
    hmm.observations.sigmasq = sigma * np.ones((K, D))
    test_lkhd = hmm.log_probability(y)

    assert np.allclose(true_lkhd, test_lkhd)
Code Example #5
def _fit_hmm(K, X_train, y_train, X_test, y_test, transitions, prior_weight,
             true_hmm):
    """
    Fits HMM with given parameters 20 times and returns all of them in a list
    of dictionaries
    """

    X_train = X_train.astype(float)
    X_test = X_test.astype(float)
    input_size = X_train.shape[1]
    hmm = ssm.HMM(K,
                  D,
                  M=input_size,
                  observations="fixedlogistic",
                  transitions=transitions,
                  transition_kwargs={"log_Ps": true_hmm.log_Ps},
                  observation_kwargs={
                      "input_size": input_size,
                      "prior_weight": prior_weight,
                      "set_coef": true_hmm.coefs
                  })
    lls = hmm.fit(y_train, inputs=X_train, method="em")
    test_ll = hmm.log_likelihood(y_test, inputs=X_test)
    result = {}
    result['hmm'] = hmm
    result['lls'] = lls
    result['test_ll'] = test_ll
    return result
Code Example #6
File: neurglmhmm-test.py Project: chingf/glm-hmm
def _fit_hmm(K, X_train, y_train, X_test, y_test, prior_weight, transitions,
             init_weight):
    """
    Fits HMM with given parameters 20 times and returns all of them in a list
    of dictionaries
    """

    input_size = X_train.shape[1]
    hmm = ssm.HMM(K,
                  D,
                  M=input_size,
                  observations="logistic",
                  transitions=transitions,
                  observation_kwargs={
                      "input_size": input_size,
                      "prior_weight": prior_weight,
                      "init_weight": init_weight
                  })
    if init_weight is None:
        initialize = False
    else:
        initialize = True
    lls = hmm.fit(y_train, inputs=X_train, method="em", initialize=initialize)
    test_ll = hmm.log_likelihood(y_test, inputs=X_test)
    result = {}
    result['hmm'] = hmm
    result['lls'] = lls
    result['test_ll'] = test_ll
    return result
Code Example #7
    def test_hmm(self):
        T = 100  # number of time bins
        K = 5  # number of discrete states
        D = 2  # dimension of the observations

        # make an hmm and sample from it
        hmm = ssm.HMM(K, D, observations="gaussian")
        z, y = hmm.sample(T)
        print(z)
        print(y)

        # Fitting an HMM is simple.

        test_hmm = ssm.HMM(K, D, observations="gaussian")
        test_hmm.fit(y)
        zhat = test_hmm.most_likely_states(y)
        print(zhat)
Code Example #8
File: test_basics.py Project: zhaoyuqi788/ssm
def test_constrained_hmm(T=100, K=3, D=3):
    hmm = ssm.HMM(K, D, M=0,
                  transitions="constrained",
                  observations="gaussian")
    z, x = hmm.sample(T)

    transition_mask = np.array([
        [1, 0, 1],
        [1, 0, 0],
        [1, 0, 1],
    ]).astype(bool)
    # A random initial transition matrix (constructed here but not passed to the model below).
    init_Ps = np.random.rand(3, 3)
    init_Ps /= init_Ps.sum(axis=-1, keepdims=True)
    transition_kwargs = dict(
        transition_mask=transition_mask
    )
    fit_hmm = ssm.HMM(K, D, M=0,
                  transitions="constrained",
                  observations="gaussian",
                  transition_kwargs=transition_kwargs)
    fit_hmm.fit(x)
    learned_Ps = fit_hmm.transitions.transition_matrix
    assert np.all(learned_Ps[~transition_mask] == 0)
Code Example #9
    def _model_init(self, dataset):
        self._state_list = dataset.get_state_list()
        self._observation_list = dataset.get_obs_list()

        K = len(self._state_list)  # number of discrete states
        D = len(self._observation_list)  # dimension of the observation
        self._hmm = ssm.HMM(K, D, observations='bernoulli')

        # note:
        # the log classes manage their own data types, so set_pi, for example,
        # transforms the matrix into the probability domain itself;
        # no further action is needed
        assert self._act_data is not None and self._act_data != {}
        init_pi = gen_handcrafted_priors_for_pi(self._act_data, K)
        self.set_new_pi(init_pi)
Code Example #10
File: fit_and_viterbi.py Project: vala1958/network
def fitting(trial_num_states, obs_dim, model, obs, optimizer, N_iters):
    """
    fits the model parameter (transition matrix/emission matrix) to the data and returns the likelehoods for
    each iteration
    :param trial_num_states:   integer, number of hidden states
    :param obs_dim:            integer, dimension of the input channels (original 27, reduced: 2-4)
    :param model:              string, distribution model of the data (eg. gaussian)
    :param obs:                2d array, (time_bins, obs_dim), data
    :param optimizer:          string ('em' or 'sgd'
    :param N_iters:            integer, number of iterations
    :return:                   floats, hmm_lls for each iteraton
    """

    hmm = ssm.HMM(trial_num_states, obs_dim, observations=model)

    hmm_lls = hmm.fit(obs,
                      method=optimizer,
                      num_iters=N_iters,
                      init_method="kmeans")

    return hmm, hmm_lls
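
For context, a hypothetical call to the fitting wrapper above might look like the following; the synthetic observations and parameter values are made up for illustration, not taken from the original project.

import numpy.random as npr

# Hypothetical usage on synthetic 2-channel data (assumes ssm is already imported above).
obs = npr.randn(5000, 2)                # (time_bins, obs_dim)
hmm, hmm_lls = fitting(trial_num_states=3,
                       obs_dim=2,
                       model="gaussian",
                       obs=obs,
                       optimizer="em",
                       N_iters=50)
print(hmm_lls[-1])                      # final log likelihood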
Code Example #11
#import numpy as np
import numpy.random as npr
import matplotlib.pyplot as plt
import ssm
from ssm.util import one_hot, find_permutation

# %%
num_states = 2      # number of discrete states (K)
obs_dim = 1         # number of observed dimensions (D)
num_categories = 2  # number of output categories (C); should not be used here for continuous output
input_dim = 3       # input dimensions (M)

# Make a GLM-HMM
true_glmhmm = ssm.HMM(num_states,
                      obs_dim,
                      input_dim,
                      observations="categorical",
                      observation_kwargs=dict(C=num_categories),
                      transitions="inputdriven")  #fix to be both driven!

# %%  replace from here for now
#true_glmhmm.observations = GLM_PoissonObservations(num_states, obs_dim, input_dim)
true_glmhmm.observations = InputVonMisesObservations(num_states, obs_dim,
                                                     input_dim)
print(true_glmhmm.transitions.Ws.shape)
print(true_glmhmm.observations.mus.shape)

# %%
#input sequence
true_glmhmm.transitions.Ws *= 3
num_sess = 10  # number of example sessions
num_trials_per_sess = 1000  # number of trials in a session
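
The snippet stops before the per-session simulation. A minimal sketch of how sessions could be generated with the sampling API used elsewhere in these examples; the Gaussian random inputs are a placeholder assumption, not the original script's design matrix.

# Sketch: simulate num_sess sessions from the generative GLM-HMM.
true_latents, true_obs, inpts = [], [], []
for sess in range(num_sess):
    inpt = npr.randn(num_trials_per_sess, input_dim)  # placeholder inputs, shape (trials, M)
    z_sess, y_sess = true_glmhmm.sample(num_trials_per_sess, input=inpt)
    inpts.append(inpt)
    true_latents.append(z_sess)
    true_obs.append(y_sess)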
Code Example #12
npr.seed(0)

# In[2]:

# Set the parameters of the HMM
T = 1000  # number of time bins
K = 2  # number of discrete states
D = 1  # data dimension
M = 1  # input dimension
C = 3  # number of output types/categories

# Make an HMM
true_hmm = ssm.HMM(K,
                   D,
                   M,
                   observations="categorical",
                   observation_kwargs=dict(C=C),
                   transitions="inputdriven")

# Optionally, turn up the input weights to exaggerate the effect
# true_hmm.transitions.Ws *= 3

# Create an exogenous input
inpt = np.sin(2 * np.pi * np.arange(T) / 50)[:, None] + 1e-1 * npr.randn(T, M)

# Sample some data from the HMM
z, y = true_hmm.sample(T, input=inpt)

# Compute the true log probability of the data, summing out the discrete states
true_lp = true_hmm.log_probability(y, inputs=inpt)
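
A typical next step, following the pattern of the other examples in this collection, is to fit a fresh input-driven HMM to the sampled data and compare its fitted log probability against true_lp; a minimal sketch:

# Sketch: fit a new input-driven HMM to the sampled data with EM and compare to true_lp.
test_hmm = ssm.HMM(K,
                   D,
                   M,
                   observations="categorical",
                   observation_kwargs=dict(C=C),
                   transitions="inputdriven")
fit_lps = test_hmm.fit(y, inputs=inpt, method="em", num_iters=100)
print("true lp:", true_lp, " fitted lp:", fit_lps[-1])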
Code Example #13
File: hyperparameter.py Project: vala1958/network
def loop_over_states_and_statistics(max_num_states, num_loops_statistics,
                                    N_iters, e_model, obs, verbose):
    """
    Inner loop: fit the model for the even and calc the log likelihood (llh) with that model for the odd and vice versa
    the metric for finding a good guess for the number of hidden states  is then the sum of these two
    log likelihoods (Lit: Celeux, Durand). The AIC criterion taking into account
    the Occram's razor principle (take the simples model which can be described with few params) based on the even-odd
    cross sum llh is also calculated and stored as well as the "normal" llh. The values are normalized. The codes
    for fitting and llh calculation are in the ssm package of the linderman's lab. The results is stored in a list which
    are the value of a dictionary ("dict_indicators"), the keys are the number of hidden states

    outer loop: loop for  statistics, do the inner loop n times to get n plots to compare (do the fit from other
    init values). Each inner loop produces a dictionary (keys: number of hidden states, values: list of llh's) These
    dictionaries are stored in a list (appended in each loop)

    :return: a list of dictionaries ("lst_dicts") containing the values of the e/o llh, AIC and llh for each number of
             hidden state   """

    lst_dicts = []
    lst_number_states = number_states(max_num_states)
    obs_even, obs_odd, obs_dim = get_even_odds(obs)

    # insert loop for statistics (gives in the end num_loop of  plots)
    for j in range(num_loops_statistics):

        dict_indicators = {}

        # loop over states
        for i in range(len(lst_number_states)):

            # fit the model on the even data and compute the log likelihood with that model
            # (params) on the odd data, and vice versa; the indicator is the sum of these two
            # log likelihoods (see Celeux & Durand)

            # open value list for dictionary of indicators
            lst = []

            even_hmm = ssm.HMM(lst_number_states[i],
                               obs_dim,
                               observations=e_model,
                               transitions="standard")

            # Fit
            hmm_lps = even_hmm.fit(obs_even,
                                   method="em",
                                   num_iters=N_iters,
                                   verbose=0)
            # llh_even = even_hmm.log_likelihood(obs_odd)

            odd_hmm = ssm.HMM(lst_number_states[i],
                              obs_dim,
                              observations=e_model,
                              transitions="standard")

            # Fit
            hmm_lps = odd_hmm.fit(obs_odd,
                                  method="em",
                                  num_iters=N_iters,
                                  verbose=0)
            # llh_odd = odd_hmm.log_likelihood(obs_even)

            # store sum of both (the criteria, normalized)
            sum_llh = even_hmm.log_likelihood(
                obs_odd) + odd_hmm.log_likelihood(obs_even)
            lst.append(sum_llh / np.size(obs, 0))

            # log likelihood without cross validation
            hmm = ssm.HMM(lst_number_states[i],
                          obs_dim,
                          observations="gaussian",
                          transitions="standard")

            # Fit (append the normalised value)
            hmm_lps = hmm.fit(obs, method="em", num_iters=N_iters, verbose=0)
            llh = hmm.log_likelihood(obs)
            lst.append(llh / np.size(obs, 0))

            # calculate AIC from cross sum llh (normalized value)
            lst.append(
                AIC(lst_number_states[i], obs_dim, sum_llh) / np.size(obs, 0))

            # insert the list as value into dict
            dict_indicators[lst_number_states[i]] = lst

        lst_dicts.append(dict_indicators)
        lst_table = nice_table(lst_dicts)

        print()
        print('loop {}:'.format(j))
        if verbose:
            print(lst_table[j].T)

    return lst_dicts, lst_table
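
The AIC helper called above is not shown in this snippet. A minimal sketch of what it could look like for a full-covariance Gaussian HMM; the exact parameter count is an assumption about how the original code counts free parameters.

def AIC(num_states, obs_dim, llh):
    # Rough free-parameter count for a full-covariance Gaussian HMM:
    # transition rows (K*(K-1)), means (K*D), covariances (K*D*(D+1)/2).
    num_params = (num_states * (num_states - 1)
                  + num_states * obs_dim
                  + num_states * obs_dim * (obs_dim + 1) // 2)
    # AIC = 2 * (number of free parameters) - 2 * log likelihood
    return 2 * num_params - 2 * llh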
Code Example #14
def test_sample(T=10, K=4, D=3, M=2):
    """
    Test that we can construct and sample an HMM
    with or withou, prefixes, noise, and noise.
    """
    transition_names = [
        "standard", "sticky", "inputdriven", "recurrent", "recurrent_only",
        "rbf_recurrent", "nn_recurrent"
    ]

    observation_names = [
        "gaussian", "diagonal_gaussian", "t", "diagonal_t", "exponential",
        "bernoulli", "categorical", "poisson", "vonmises", "ar", "no_input_ar",
        "diagonal_ar", "independent_ar", "robust_ar", "no_input_robust_ar",
        "diagonal_robust_ar"
    ]

    # Sample basic (no prefix, inputs, etc.)
    for transitions in transition_names:
        for observations in observation_names:
            hmm = ssm.HMM(K,
                          D,
                          M=0,
                          transitions=transitions,
                          observations=observations)
            zsmpl, xsmpl = hmm.sample(T)

    # Sample with prefix
    for transitions in transition_names:
        for observations in observation_names:
            hmm = ssm.HMM(K,
                          D,
                          M=0,
                          transitions=transitions,
                          observations=observations)
            zpre, xpre = hmm.sample(3)
            zsmpl, xsmpl = hmm.sample(T, prefix=(zpre, xpre))

    # Sample with inputs
    for transitions in transition_names:
        for observations in observation_names:
            hmm = ssm.HMM(K,
                          D,
                          M=M,
                          transitions=transitions,
                          observations=observations)
            zpre, xpre = hmm.sample(3, input=npr.randn(3, M))
            zsmpl, xsmpl = hmm.sample(T,
                                      prefix=(zpre, xpre),
                                      input=npr.randn(T, M))

    # Sample without noise
    for transitions in transition_names:
        for observations in observation_names:
            hmm = ssm.HMM(K,
                          D,
                          M=M,
                          transitions=transitions,
                          observations=observations)
            zpre, xpre = hmm.sample(3, input=npr.randn(3, M))
            zsmpl, xsmpl = hmm.sample(T,
                                      prefix=(zpre, xpre),
                                      input=npr.randn(T, M),
                                      with_noise=False)
Code Example #15
             color=colors[1],
             label="MF" if n == 0 else None)
    plt.plot(q_struct_y[:, n] + 4 * n,
             ':',
             color=colors[2],
             label="Struct" if n == 0 else None)
plt.legend()
plt.xlabel("time")

# # Fit an HMM to the LDS states

# In[13]:

N_iters = 50
K = 15
hmm = ssm.HMM(K, D, observations="gaussian")
hmm_lls = hmm.fit(x, method="em", num_iters=N_iters)
z = hmm.most_likely_states(x)

# In[14]:

plt.plot(hmm_lls, label="EM")
plt.xlabel("EM Iteration")
plt.ylabel("Log Probability")
plt.legend(loc="lower right")

# In[15]:

# Plot the observation distributions
from hips.plotting.colormaps import white_to_color_cmap
Code Example #16
def __init__(self, x_true):
    self.test_hmm = ssm.HMM(K, D, observations="bernoulli")
    self.test_hmm.fit(x_true)
    self.K = K
Code Example #17
import autograd.numpy.random as npr
npr.seed(0)

import matplotlib
import matplotlib.pyplot as plt

import ssm
from ssm.util import find_permutation

# Set the parameters of the HMM
T = 500  # number of time bins
K = 5  # number of discrete states
D = 2  # number of observed dimensions

# Make an HMM with the true parameters
true_hmm = ssm.HMM(K, D, observations="diagonal_gaussian")
z, y = true_hmm.sample(T)
z_test, y_test = true_hmm.sample(T)
true_ll = true_hmm.log_probability(y)

# Fit models
N_sgd_iters = 1000
N_em_iters = 100

# A bunch of observation models that all include the
# diagonal Gaussian as a special case.
observations = [
    "diagonal_gaussian", "gaussian", "diagonal_t", "studentst", "diagonal_ar",
    "ar", "diagonal_robust_ar", "robust_ar"
]
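
The snippet ends before the fitting loop; a sketch of what it presumably does, following the same pattern as the loop in Code Example #28:

# Sketch: fit every candidate observation model with both SGD and EM
# and keep the training log likelihoods and test log likelihood for comparison.
results = {}
for obs in observations:
    for method, n_iters in [("sgd", N_sgd_iters), ("em", N_em_iters)]:
        print("Fitting {} HMM with {}".format(obs, method))
        model = ssm.HMM(K, D, observations=obs)
        train_lls = model.fit(y, method=method, num_iters=n_iters)
        test_ll = model.log_likelihood(y_test)
        results[(obs, method)] = (model, train_lls, test_ll)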
Code Example #18
plt.plot(x[:, 0], x[:, 1])

# In[7]:

mog = MixtureOfGaussians(100, D)
mog.fit(x)

# In[8]:

plt.plot(x[:, 0], x[:, 1])
for mu in mog.observations.mus:
    plt.plot(mu[0], mu[1], 'o')

# In[9]:

arhmm = ssm.HMM(K=8, D=D, observations="ar")
arhmm.fit(x)

# In[10]:

z_smpl, x_smpl = arhmm.sample(T=3000)
plt.plot(x_smpl[:, 0], x_smpl[:, 1])

# In[11]:


def sample(T=3000,
           num_samples=25,
           num_burnin=100,
           schedule=None,
           filename="samples.mp4"):
Code Example #19
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# %% load circuit results
obs = rr.T.copy() #rr[0,:][:,None] #
inpt = stim.T.copy()  #stim[0,:][:,None] #
# Set the parameters of the HMM
time_bins = obs.shape[0] # number of time bins
num_states = 3    # number of discrete states
obs_dim = obs.shape[1]    # data dimension
input_dim = stim.shape[0]    # input dimension
num_categories = 3    # number of output types/categories

# %% run driven HMM inference
# Now create a new HMM and fit it to the data with EM
N_iters = 100
hmm = ssm.HMM(num_states, obs_dim, input_dim, 
          observations="gaussian", #observation_kwargs=dict(C=num_categories),
          transitions="inputdriven")

# Fit
hmm_lps = hmm.fit(obs, inputs=inpt, method="em", num_iters=N_iters)

# %%
# Plot the log probabilities of the true and fit models
plt.figure()
plt.plot(hmm_lps, label="EM")
plt.legend(loc="lower right")
plt.xlabel("EM Iteration")
plt.xlim(0, N_iters)
plt.ylabel("Log Probability")
plt.show()
Code Example #20
npr.seed(0)

import matplotlib
import matplotlib.pyplot as plt

import ssm
from ssm.util import find_permutation

# Set the parameters of the HMM
T = 500  # number of time bins
K = 5  # number of discrete states
D = 2  # number of observed dimensions

# Make an HMM with the true parameters
true_hmm = ssm.HMM(K, D, observations="exponential")
z, y = true_hmm.sample(T)
z_test, y_test = true_hmm.sample(T)
true_ll = true_hmm.log_probability(y)

# Fit models
N_sgd_iters = 1000
N_em_iters = 100

# The observation model to fit (here just the exponential model).
observations = ["exponential"]

# Fit with both SGD and EM
methods = ["sgd", "em"]
Code Example #21
# loads training data from mat-file, saves posterior probabilities into ssm_posterior_probs.mat

import numpy
import ssm
from scipy import io
import time
import os

# Build an HMM instance and set parameters
#np.random.seed(1)
num_states = 20  # number of discrete states
obs_dim = 14  # dimensionality of observation
#cov="gaussian"
#cov="diagonal_gaussian"
cov = 'autoregressive'
hmm = ssm.HMM(num_states, obs_dim, observations=cov)

#load data using loadmat
mat = io.loadmat('training_data.mat')
#mat=io.loadmat('C:/Users/Kat/Resilio Sync/Prey Capture/state_epoch_clips-06-Jan-2021/training_data.mat')
X = mat['X']

#fit hmm to data
N_iters = 200
#N_iters=10
hmm_lls = hmm.fit(X, method="em", num_iters=N_iters)
Z = hmm.most_likely_states(X)
Ps = hmm.expected_states(X)  # returns (expected_states, expected_joints, normalizer)
TM = hmm.transitions.transition_matrix
run_on = time.asctime(time.localtime(time.time()))
run_from = os.getcwd()
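
The header comment says the posterior probabilities are saved to ssm_posterior_probs.mat, but the save itself is not part of this snippet. A sketch of how it could be completed; note that expected_states returns a tuple whose first element holds the per-time-bin state probabilities.

# Sketch: save the results as promised by the header comment.
io.savemat('ssm_posterior_probs.mat',
           {'Z': Z,           # most likely state sequence
            'Ps': Ps[0],      # posterior state probabilities
            'TM': TM,         # learned transition matrix
            'lls': hmm_lls,   # EM log likelihoods
            'run_on': run_on,
            'run_from': run_from})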
Code Example #22
File: GLM-HHM.py Project: alejandropan/rewardworld
import numpy as np
import numpy.random as npr
import matplotlib.pyplot as plt
import ssm
from ssm.util import one_hot, find_permutation
import statsmodels.api as sm
from scipy.stats import bernoulli

npr.seed(0)

# Set the parameters of the GLM-HMM
num_states = 2        # number of discrete states
obs_dim = 1           # number of observed dimensions
num_categories = 2    # number of categories for output
input_dim = 36         # input dimensions

# Make a GLM-HMM
true_glmhmm = ssm.HMM(num_states, obs_dim, input_dim, observations="input_driven_obs",
                   observation_kwargs=dict(C=num_categories), transitions="standard")

# Put some toy parameters for simulations
gen_weights = np.array([[[2.6, 1.3, 0.3, 0.2, 0.1, 0.01, 0.01, 0.01, 0.01, 0.01,
                          0.06, 0.03, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01,
                          1, 0.6, 0.2, 0.1, 0.02,
                          0.8, 0.4, 0.1, 0.05, 0.01,
                          0.01, 0.01, 0.01, 0.01, 0.01,
                          0.3]],
                        [[2.0, 1.6, 1.0, 0.6, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25,
                          2.0, 1.6, 1.0, 0.6, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25,
                          0.6, 0.2, 0.15, 0.15, 0.05,
                          0.6, 0.2, 0.15, 0.15, 0.05,
                          0.8, 0.6, 0.3, 0.1, 0.01,
                          0.2]]])
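
The snippet stops after defining the toy weights. In the ssm GLM-HMM examples these generative weights are installed through the observation model's params attribute before sampling; a sketch, with a placeholder design matrix that is not part of the original script:

# Sketch: install the toy weights and simulate choices from the generative GLM-HMM.
true_glmhmm.observations.params = gen_weights

num_trials = 500
inpt = npr.randn(num_trials, input_dim)  # placeholder design matrix, shape (trials, M)
true_z, true_choices = true_glmhmm.sample(num_trials, input=inpt)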
Code Example #23
File: hmm.py Project: tcsvn/pyadlml
def _model_init(self, dataset):
    K = len(self._state_list)  # number of discrete states
    D = len(self._observation_list)  # dimension of the observation
    self._hmm = ssm.HMM(K, D, observations='bernoulli')
Code Example #24
            training_inpt = list(map(meta_inpts.__getitem__, train_idx))
            training_choice = list(
                map(meta_true_choices.__getitem__, train_idx))
            test_inpt = list(map(meta_inpts.__getitem__, test_idx))
            test_choices = list(map(meta_true_choices.__getitem__, test_idx))
            for i in np.arange(1, 4):
                num_states = i
                obs_dim = 1
                input_dim = 5
                num_categories = 2
                i_ll = []
                for p in np.arange(20):
                    bandit_glmhmm = ssm.HMM(
                        num_states,
                        obs_dim,
                        input_dim,
                        observations="input_driven_obs",
                        observation_kwargs=dict(C=num_categories),
                        transitions="standard")
                    fit_ll = bandit_glmhmm.fit(training_choice,
                                               inputs=training_inpt,
                                               method="em",
                                               num_iters=N_iters,
                                               tolerance=10**-4)
                    i_ll.append(
                        bandit_glmhmm.log_likelihood(test_choices,
                                                     inputs=test_inpt))
                lls.append(np.mean(i_ll))
                lls_var.append(np.std(i_ll))
            log_likelihoods[:, fold] = lls
Code Example #25
import numpy 
import ssm
from scipy import io
import time
import os

# Build an HMM instance and set parameters
#np.random.seed(1)
num_states = 60    # number of discrete states
observation_class = 'autoregressive'
obs_dim = 14       # dimensionality of observation
transitions = 'sticky'
kappa = 1E14
AR_lags =  20
hmm = ssm.HMM(num_states, obs_dim,
              observations=observation_class, observation_kwargs={'lags':AR_lags},
              transitions=transitions, transition_kwargs={'kappa': kappa})
print([num_states, kappa, AR_lags])

#load data using loadmat
mat=io.loadmat('training_data.mat') 
#mat=io.loadmat('C:/Users/Kat/Resilio Sync/Prey Capture/state_epoch_clips-06-Jan-2021/training_data.mat') 
X = mat['X']

#fit hmm to data
N_iters=20
#N_iters=10
hmm_lls = hmm.fit(X, method="em", num_iters=N_iters)
Z = hmm.most_likely_states(X)
Ps = hmm.expected_states(X)  # returns (expected_states, expected_joints, normalizer)
TM = hmm.transitions.transition_matrix
Code Example #26
def fit_hmm(obs, num_states=2, obs_dim=1, obs_dist='gaussian'):
    # Make an HMM, fit it with EM, and return the model and the final log likelihood
    hmm = ssm.HMM(num_states, obs_dim, observations=obs_dist)
    hmm_lls = hmm.fit(obs, method="em")
    return hmm, hmm_lls[-1]
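
A minimal, made-up usage of the fit_hmm helper above on synthetic one-dimensional data:

import numpy as np
import numpy.random as npr

# Two well-separated clusters of 1-D observations, shaped (T, 1) as ssm expects.
obs = np.concatenate([npr.randn(200, 1), 5.0 + npr.randn(200, 1)])
hmm, final_ll = fit_hmm(obs, num_states=2, obs_dim=1, obs_dist='gaussian')
print("final log likelihood:", final_ll)
print(hmm.transitions.transition_matrix)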
Code Example #27
    trial = conditioned_trials[i]
    visual_time = trials['visStim_times'][trial]
    cue_time = trials['cue_times'][trial]
    feedback_time = trials['feedback_times'][trial]

    # generate the spike count histograms
    t0 = visual_time - pre_stim_dt
    tf = feedback_time + post_resp_dt
    [dataset,
     time_bins] = generate_spike_counts(recording_name, brain_region,
                                        neuron_min_score, bin_dt, t0, tf)
    (n_neurons, n_bins) = dataset.shape

    # Create a hmm model
    train_data = dataset.astype(int).T
    model = ssm.HMM(N_states, n_neurons, observations="poisson")
    hmm_lls = model.fit(train_data, method="em", num_iters=1000)
    posterior = model.filter(train_data)
    # states = model.most_likely_states(train_data)

plt.figure(n_trials, figsize=[9, 5])
for s in range(N_states):
    plt.plot(posterior[:, s], label="State %d" % s)

plt.suptitle('Posterior probability of latent states')
plt.xlabel(f'time bin ({int(bin_dt*1000)} ms)')
plt.ylabel('probability')

plt.legend()
plt.show()
Code Example #28
# Set the parameters of the HMM
T = X.shape[0]      # number of time bins
K = 5       # number of discrete states
D = X.shape[1]       # number of observed dimensions


# Fit with both SGD and EM
#methods = ["sgd", "em"]
methods = ["em"]

results = {}
for obs in observations:
    for method in methods:
        print("Fitting {} HMM with {}".format(obs, method))
        model = ssm.HMM(K, D, observations=obs)
        train_lls = model.fit(X, method=method)
        #test_ll = model.log_likelihood(y_test)
        smoothed_X = model.smooth(X)

        # Permute to match the true states
        #model.permute(find_permutation(z, model.most_likely_states(y)))
        smoothed_z = model.most_likely_states(X)
        results[(obs, method)] = (model, train_lls, smoothed_z, smoothed_X)


# Set up one subplot per observation model
fig, axs = plt.subplots(len(observations), 1, figsize=(12, 8))

# Plot the inferred states
for i, obs in enumerate(observations):
Code Example #29
def main(hparams):

    if not isinstance(hparams, dict):
        hparams = vars(hparams)

    # print hparams to console
    _print_hparams(hparams)

    # start at random times (so test tube creates separate folders)
    np.random.seed(random.randint(0, 1000))
    time.sleep(np.random.uniform(1))

    # create test-tube experiment
    hparams, sess_ids, exp = create_tt_experiment(hparams)
    if hparams is None:
        print('Experiment exists! Aborting fit')
        return

    # build data generator
    data_generator = build_data_generator(hparams, sess_ids)

    # ####################
    # ### CREATE MODEL ###
    # ####################

    # get all latents in list
    n_datasets = len(data_generator)
    print('collecting observations from data generator...', end='')
    data_key = 'ae_latents'
    if hparams['model_class'].find('labels') > -1:
        data_key = 'labels'
    latents, trial_idxs = get_latent_arrays_by_dtype(
        data_generator, sess_idxs=list(range(n_datasets)), data_key=data_key)
    obs_dim = latents['train'][0].shape[1]

    hparams['total_train_length'] = np.sum([l.shape[0] for l in latents['train']])
    # get separated by dataset as well
    latents_sess = {d: None for d in range(n_datasets)}
    trial_idxs_sess = {d: None for d in range(n_datasets)}
    for d in range(n_datasets):
        latents_sess[d], trial_idxs_sess[d] = get_latent_arrays_by_dtype(
            data_generator, sess_idxs=d, data_key=data_key)
    print('done')

    if hparams['model_class'] == 'arhmm' or hparams['model_class'] == 'hmm':
        hparams['ae_model_path'] = os.path.join(
            os.path.dirname(data_generator.datasets[0].paths['ae_latents']))
        hparams['ae_model_latents_file'] = data_generator.datasets[0].paths['ae_latents']

    # collect model constructor inputs
    if hparams['noise_type'] == 'gaussian':
        if hparams['n_arhmm_lags'] > 0:
            if hparams['model_class'][:5] != 'arhmm':  # 'arhmm' or 'arhmm-labels'
                raise ValueError('Must specify model_class as arhmm when using AR lags')
            obs_type = 'ar'
        else:
            if hparams['model_class'][:3] != 'hmm':  # 'hmm' or 'hmm-labels'
                raise ValueError('Must specify model_class as hmm when using 0 AR lags')
            obs_type = 'gaussian'
    elif hparams['noise_type'] == 'studentst':
        if hparams['n_arhmm_lags'] > 0:
            if hparams['model_class'][:5] != 'arhmm':  # 'arhmm' or 'arhmm-labels'
                raise ValueError('Must specify model_class as arhmm when using AR lags')
            obs_type = 'robust_ar'
        else:
            if hparams['model_class'][:3] != 'hmm':  # 'hmm' or 'hmm-labels'
                raise ValueError('Must specify model_class as hmm when using 0 AR lags')
            obs_type = 'studentst'
    else:
        raise ValueError('%s is not a valid noise type' % hparams['noise_type'])

    if hparams['n_arhmm_lags'] > 0:
        obs_kwargs = {'lags': hparams['n_arhmm_lags']}
        obs_init_kwargs = {'localize': True}
    else:
        obs_kwargs = None
        obs_init_kwargs = {}
    if hparams['kappa'] == 0:
        transitions = 'stationary'
        transition_kwargs = None
    else:
        transitions = 'sticky'
        transition_kwargs = {'kappa': hparams['kappa']}

    print('constructing model...', end='')
    np.random.seed(hparams['rng_seed_model'])
    hmm = ssm.HMM(
        hparams['n_arhmm_states'], obs_dim,
        observations=obs_type, observation_kwargs=obs_kwargs,
        transitions=transitions, transition_kwargs=transition_kwargs)
    hmm.initialize(latents['train'])
    hmm.observations.initialize(latents['train'], **obs_init_kwargs)
    # save out hparams as csv and dict
    hparams['training_completed'] = False
    export_hparams(hparams, exp)
    print('done')

    # ####################
    # ### TRAIN MODEL ###
    # ####################

    # TODO: move fitting into own function
    # TODO: adopt early stopping strategy from ssm
    # precompute normalizers
    n_datapoints = {}
    n_datapoints_sess = {}
    for dtype in {'train', 'val', 'test'}:
        n_datapoints[dtype] = np.vstack(latents[dtype]).size
        n_datapoints_sess[dtype] = {}
        for d in range(n_datasets):
            n_datapoints_sess[dtype][d] = np.vstack(latents_sess[d][dtype]).size

    for epoch in range(hparams['n_iters'] + 1):
        # Note: the 0th epoch has no training (randomly initialized model is evaluated) so we cycle
        # through `n_iters` training epochs

        print('epoch %03i/%03i' % (epoch, hparams['n_iters']))
        if epoch > 0:
            hmm.fit(latents['train'], method='em', num_iters=1, initialize=False)

        # export aggregated metrics on train/val data
        tr_ll = hmm.log_likelihood(latents['train']) / n_datapoints['train']
        val_ll = hmm.log_likelihood(latents['val']) / n_datapoints['val']
        exp.log({
            'epoch': epoch, 'dataset': -1, 'tr_loss': tr_ll, 'val_loss': val_ll, 'trial': -1})

        # export individual session metrics on train/val data
        for d in range(data_generator.n_datasets):
            tr_ll = hmm.log_likelihood(latents_sess[d]['train']) / n_datapoints_sess['train'][d]
            val_ll = hmm.log_likelihood(latents_sess[d]['val']) / n_datapoints_sess['val'][d]
            exp.log({
                'epoch': epoch, 'dataset': d, 'tr_loss': tr_ll, 'val_loss': val_ll, 'trial': -1})

    # export individual session metrics on test data
    for d in range(n_datasets):
        for i, b in enumerate(trial_idxs_sess[d]['test']):
            n = latents_sess[d]['test'][i].size
            test_ll = hmm.log_likelihood(latents_sess[d]['test'][i]) / n
            exp.log({'epoch': epoch, 'dataset': d, 'test_loss': test_ll, 'trial': b})
    exp.save()

    # reconfigure model/states by usage
    zs = [hmm.most_likely_states(x) for x in latents['train']]
    usage = np.bincount(np.concatenate(zs), minlength=hmm.K)
    perm = np.argsort(usage)[::-1]
    hmm.permute(perm)

    # save model
    filepath = os.path.join(hparams['expt_dir'], 'version_%i' % exp.version, 'best_val_model.pt')
    with open(filepath, 'wb') as f:
        pickle.dump(hmm, f)   

    # ######################
    # ### EVALUATE ARHMM ###
    # ######################

    # export states
    if hparams['export_states']:
        export_states(hparams, data_generator, hmm)

    # export training plots
    if hparams['export_train_plots']:
        print('creating training plots...', end='')
        version_dir = os.path.join(hparams['expt_dir'], 'version_%i' % hparams['version'])
        save_file = os.path.join(version_dir, 'loss_training')
        export_train_plots(hparams, 'train', loss_type='ll', save_file=save_file)
        save_file = os.path.join(version_dir, 'loss_validation')
        export_train_plots(hparams, 'val', loss_type='ll', save_file=save_file)
        print('done')

    # update hparams upon successful training
    hparams['training_completed'] = True
    export_hparams(hparams, exp)

    # get rid of unneeded logging info
    _clean_tt_dir(hparams)
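
Since the fitted ARHMM is pickled to best_val_model.pt above, loading it back for later analysis is just the reverse operation; a small sketch using the same filepath constructed in the script:

import pickle

# Sketch: reload the ARHMM saved by the training script above.
with open(filepath, 'rb') as f:
    hmm = pickle.load(f)

# The reloaded model can then be used as before, e.g.
# zs = [hmm.most_likely_states(x) for x in latents['test']]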
Code Example #30
    p = np.array([np.real(v)**2 + np.imag(v)**2 for v in f])
    pi = np.fft.ifft(p)
    return np.real(pi)[:int(x.size / 2)] / np.sum(xp**2)


plt.figure()
plt.plot(autocorrelation(rt[1, :])[:5000])

# %% Dwell-time calculation!
import ssm
# %% based on HMM
obs = rt[:4, :].T
obs_dims = obs.shape[1]
N_iters = 50
num_states = 2  #assuming transition between two patterns
hmm = ssm.HMM(num_states, obs_dims, observations='gaussian')
hmm_lls = hmm.fit(obs, method='em', num_iters=N_iters)

# %%
plt.figure()
plt.plot(hmm_lls)
most_lls = hmm.most_likely_states(obs)
plt.figure()
plt.subplot(211)
plt.plot(most_lls)
plt.xlim([0, len(most_lls)])
plt.subplot(212)
plt.imshow(obs.T, aspect='auto')

learned_transition_mat = hmm.transitions.transition_matrix
print(learned_transition_mat)
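
The section comment above announces a dwell-time calculation, but the snippet stops at the transition matrix. Two common ways to get dwell times from the fitted HMM are sketched below: run lengths of the decoded state sequence, and the expected dwell time 1/(1 - P_ii) implied by the learned self-transition probabilities.

# Sketch: dwell times as run lengths of the decoded state sequence.
change_points = np.where(np.diff(most_lls) != 0)[0] + 1
segments = np.split(most_lls, change_points)
dwell_times = {k: [len(seg) for seg in segments if seg[0] == k]
               for k in range(num_states)}
print({k: np.mean(v) for k, v in dwell_times.items() if len(v) > 0})

# Sketch: expected dwell time implied by the learned self-transition probabilities.
expected_dwell = 1.0 / (1.0 - np.diag(learned_transition_mat))
print(expected_dwell)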