Example #1
0
 def __init__(self, letter_type, rho=1.0, alpha=1.0, gamma=1.0, lmbda=4.0):
     """Set up the per-letter sub-models for a word.

     Hyperparameters pass straight through: ``rho`` to the initial-state
     prior, ``alpha``/``gamma`` to the letter-transition model, and
     ``lmbda`` to the Poisson word-length distribution.
     """
     dim = letter_type
     self.letter_dim = dim
     self.init_dist = InitialState(state_dim=dim, rho=rho)
     self.letter_trans = WordTransitions(state_dim=dim, alpha=alpha, gamma=gamma)
     self.letter_dur = PoissonDuration(lmbda=lmbda)
Example #2
0
class WordModel(object):
    """Generative model of a word as a letter sequence.

    Combines an initial-letter distribution, a letter-transition model,
    and a Poisson distribution over word length.
    """

    def __init__(self, letter_type, rho=1.0, alpha=1.0, gamma=1.0, lmbda=4.0):
        dim = letter_type
        self.letter_dim = dim
        self.init_dist = InitialState(state_dim=dim, rho=rho)
        self.letter_trans = WordTransitions(state_dim=dim, alpha=alpha, gamma=gamma)
        self.letter_dur = PoissonDuration(lmbda=lmbda)

    def resample(self, data):
        """Resample transition and initial-state parameters from `data`."""
        self.letter_trans.resample(data)
        # The initial-state distribution only sees each word's first letter.
        first_letters = [word[:1] for word in data]
        self.init_dist.resample(first_letters)

    def generate(self):
        """Sample one word (a tuple of letter indices) from the model."""
        # Poisson can draw 0; force a minimum word length of one letter.
        length = self.letter_dur.rvs() or 1
        current_dist = self.init_dist.pi_0
        letters = []

        while len(letters) < length:
            letter = sample_discrete(current_dist)
            letters.append(letter)
            # Next letter is drawn from the transition row of this letter.
            current_dist = self.letter_trans.A[letter]

        return tuple(letters)
               9.56754688e-04, 9.56754688e-04, 9.56754688e-04, 9.56754688e-04,
               9.56754688e-04, 9.56754688e-04, 9.56754688e-04, 9.56754688e-04,
               9.56754688e-04, 9.56754688e-04, 9.56754688e-04, 9.56754688e-04,
               9.56754688e-04, 9.56754688e-04, 9.56754688e-04, 9.56754688e-04,
               9.56754688e-04, 9.56754688e-04, 9.56754688e-04, 9.56754688e-04,
               9.56754688e-04, 9.56754688e-04, 9.56754688e-04, 9.56754688e-04
           ]]

# After setting the parameters and implementing a MixtureDistribution in the pybasicbayes package we can start defining our HSMM.

# In[13]:

# One Poisson duration distribution per HSMM state, each with its own
# Gamma(alpha_0, beta_0) prior hyperparameters.
# (Replaces a manual append loop — and its commented-out duplicate —
# with the single comprehension.)
dur_distns = [
    PoissonDuration(alpha_0=alpha_0s[state], beta_0=beta_0s[state])
    for state in range(len(state_list))
]

# Maps each observation component to an index into `distv`: presumably
# components 0-2 are modelled by the Gaussian (index 0) and component 3
# by the Categorical (index 1) — verify against the consumer of this map.
dist_obs_map = [0, 0, 0, 1]
# The two observation distributions referenced by `dist_obs_map`.
# NOTE(review): assumes mu_0s/sigma_0s/kappa_0/nu_0/weights are defined
# earlier in the file with compatible shapes — confirm.
distv = [
    Gaussian(mu_0=mu_0s, sigma_0=sigma_0s, kappa_0=kappa_0, nu_0=nu_0),
    Categorical(weights=weights, K=len(weights), alpha_0=5)
]

# In[14]:

# Scratch notebook cell: np.unique de-duplicates and sorts the values.
np.unique(np.asarray([1, 1, 3, 4, 2, 1, 3, 5]))

# In[3]:
Example #4
0
 def resample_dur_dists(self):
     """Rebuild each word's duration distribution from its letters.

     A word's duration is Poisson with rate equal to the sum of the
     Poisson rates of its constituent letters' duration models.
     """
     rebuilt = []
     for word in self.word_list:
         total_rate = np.sum([self.dur_distns[letter].lmbda for letter in word])
         rebuilt.append(PoissonDuration(lmbda=total_rate))
     self.word_dur_dists = rebuilt
Example #5
0

dur_distns = []
gaussians = []
categoricals = []
mixtures = []

# Promote the hyperparameter lists to arrays so they can be indexed per state
# (and sliced by row, in the case of `weights`).
alpha_0s = np.array(alpha_0s)
beta_0s = np.array(beta_0s)
mu_0s = np.array(mu_0s)
sigma_0s = np.array(sigma_0s)
weights = np.array(weights)

# Observation component -> distribution index (0 = Gaussian, 1 = Categorical).
dist_obs_map = [0, 0, 0, 1]

# One duration model, Gaussian, Categorical, and mixture per state,
# built in the same order as `state_list`.
for idx in range(len(state_list)):
    dur_distns.append(
        PoissonDuration(alpha_0=alpha_0s[idx], beta_0=beta_0s[idx]))
    gauss = Gaussian(mu_0=mu_0s[idx], sigma_0=sigma_0s[idx],
                     kappa_0=kappa_0, nu_0=nu_0)
    cat = Categorical(weights=weights[idx, :], K=weights.shape[1],
                      alpha_0=alpha_0)
    gaussians.append(gauss)
    categoricals.append(cat)
    mixtures.append(
        DistributionMixture(distv=[gauss, cat], dist_obs_map=dist_obs_map))

distv = [gaussians, categoricals]

tmat = [
    [0.000, 0.760, 0.010, 0.010, 0.210, 0.010],  # DX
    [0.010, 0.000, 0.430, 0.430, 0.120, 0.010],  # place tool
    [0.010, 0.450, 0.000, 0.250, 0.250, 0.040],  # cutting loop
    [0.010, 0.530, 0.210, 0.000, 0.200, 0.050],  # coag loop
    [0.010, 0.450, 0.200, 0.250, 0.000, 0.050],  # clear view
Example #6
0
D_latent = 2  # latent linear dynamics' dimension
D_input = 1  # exogenous input dimension (original comment was a copy-paste of the latent one)
D_obs = 2  # data dimension
N_iter = 200  # number of VBEM iterations

# Per-state AR dynamics matrices: a rotation by `theta` scaled by spectral
# radius `alpha` (alpha < 1 gives damped/stable dynamics; the last state is
# the identity).  The original code unpacked `alpha` but never applied it,
# so the (alpha, theta) pairs had no effect on stability — multiply it in.
As = [
    alpha * np.array([[np.cos(theta), -np.sin(theta)],
                      [np.sin(theta), np.cos(theta)]])
    for alpha, theta in ((0.95, 0.1), (0.95, -0.1), (1., 0.))
]

# Ground-truth autoregressive HSMM used to generate synthetic training data:
# one AR observation distribution per dynamics matrix in `As`, with Poisson
# state durations (Gamma(150, 3) prior, i.e. prior mean rate of 50 steps).
truemodel = ARHSMM(
    alpha=4.,
    init_state_concentration=4.,
    obs_distns=[AutoRegression(A=A, sigma=0.05 * np.eye(2)) for A in As],
    dur_distns=[PoissonDuration(alpha_0=3 * 50, beta_0=3) for _ in As])

# Seed the AR process with an initial observation so the first lag is defined.
truemodel.prefix = np.array([[0., 3.]])
# NOTE(review): T (sequence length) is defined elsewhere in the file — confirm.
data, labels = truemodel.generate(T)
data = data[truemodel.nlags:]  # discard the first nlags samples (the AR prefix)

# Quick sanity plot of the generated 2-D trajectory in observation space.
plt.figure()
plt.plot(data[:, 0], data[:, 1], 'x-')

#################
#  build model  #
#################

# One emission matrix and one observation-noise covariance per candidate
# state.  Each iteration allocates fresh arrays, so no two states alias
# the same matrix object.
Cs = []           # shared emission matrices
sigma_obss = []   # emission noise covariances
for _ in range(Kmax):
    Cs.append(np.eye(D_obs))
    sigma_obss.append(0.05 * np.eye(D_obs))