Example no. 1
 def _do_backward_pass(self, framelogprob):
     n_samples, n_components = framelogprob.shape
     bwdlattice = np.zeros((n_samples, n_components))
     _hmmc._backward(n_samples, n_components,
                     log_mask_zero(self.startprob_),
                     log_mask_zero(self.transmat_), framelogprob,
                     bwdlattice)
     return bwdlattice
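Example no. 1 relies on hmmlearn's log_mask_zero helper to take element-wise logs of the start and transition probabilities without tripping over zero entries. A minimal sketch of such a helper, assuming only NumPy (zero probabilities simply map to -inf):

import numpy as np

def log_mask_zero(a):
    # Element-wise log that silences the divide-by-zero warning,
    # so entries equal to 0 become -inf without emitting a RuntimeWarning.
    a = np.asarray(a)
    with np.errstate(divide="ignore"):
        return np.log(a)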
Example no. 2
def _do_backward_pass(log_startprob, log_transmat, framelogprob):
    n_samples, n_components = framelogprob.shape
    bwdlattice = np.zeros((n_samples, n_components))
    _hmmc._backward(
        n_samples, n_components, log_startprob, log_transmat, framelogprob,
        bwdlattice
    )
    return bwdlattice
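Every example here delegates the actual recursion to the compiled routine _hmmc._backward, which fills bwdlattice in place. For reference, a pure-NumPy sketch of the log-space backward recursion it computes; the start probabilities are passed to _hmmc._backward for symmetry with the forward pass but are not needed by the recursion itself, so they are omitted here. This is an illustrative re-implementation, not hmmlearn's code:

import numpy as np
from scipy.special import logsumexp

def backward_reference(log_transmat, framelogprob):
    # bwdlattice[t, i] = log P(obs[t+1:] | state at time t is i)
    n_samples, n_components = framelogprob.shape
    bwdlattice = np.zeros((n_samples, n_components))  # last row stays at log(1) = 0
    for t in range(n_samples - 2, -1, -1):
        for i in range(n_components):
            bwdlattice[t, i] = logsumexp(log_transmat[i]
                                         + framelogprob[t + 1]
                                         + bwdlattice[t + 1])
    return bwdlattice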
Example no. 3
 def _do_backward_pass(self, framelogprob):
     # Based on hmmlearn's _BaseHMM
     safe_startmat = self.startprob_ + np.finfo(float).eps
     safe_transmat = self.transmat_ + np.finfo(float).eps
     n_samples, n_components = framelogprob.shape
     bwdlattice = np.zeros((n_samples, n_components))
     _hmmc._backward(n_samples, n_components, np.log(safe_startmat),
                     np.log(safe_transmat), framelogprob, bwdlattice)
     return bwdlattice
Example no. 4
 def _do_backward_pass(self, framelogprob):
     # Based on hmmlearn's _BaseHMM
     safe_startmat = self.startprob_ + np.finfo(float).eps
     safe_transmat = self.transmat_ + np.finfo(float).eps
     n_samples, n_components = framelogprob.shape
     bwdlattice = np.zeros((n_samples, n_components))
     _hmmc._backward(n_samples, n_components,
                     np.log(safe_startmat),
                     np.log(safe_transmat),
                     framelogprob, bwdlattice)
     return bwdlattice
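Examples no. 3 and no. 4 are identical apart from line wrapping; both guard against log(0) by adding machine epsilon to the probabilities before taking the log, whereas Example no. 1 masks zeros instead. A quick NumPy comparison of the two guards (using the log_mask_zero sketch above for the second form):

import numpy as np

p = np.array([0.0, 0.3, 0.7])
print(np.log(p + np.finfo(float).eps))  # every entry finite; log(eps) is about -36
with np.errstate(divide="ignore"):
    print(np.log(p))                    # exact; the zero probability becomes -inf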
Example no. 5
torch_fwdlattice, torch_logprob = _forward(int(data.shape[1]),
                                           int(n_components),
                                           torch.from_numpy(np.log(startprob)),
                                           torch.from_numpy(np.log(transmat)),
                                           torch.from_numpy(data),
                                           torch.from_numpy(mask))
print("torch batch forward: \n {}\n".format(torch_fwdlattice))
print("torch batch logprob: {}\n".format(torch_logprob))

# 3. backward comparison

hmmlearn_bwdlattice = []
for idx, framelogprob in enumerate(data):
    bwdlattice = np.zeros((mask[idx].sum(), n_components))
    _hmmc._backward(int(mask[idx].sum()),
                    int(n_components),
                    np.log(startprob),
                    np.log(transmat),
                    framelogprob[mask[idx] > 0],
                    bwdlattice)
    hmmlearn_bwdlattice.append(bwdlattice)
print("hmmlearn backward lattice: \n{}".format(hmmlearn_bwdlattice))

# 3.2 batch backward in PyTorch

torch_bwdlattice = _backward(int(data.shape[1]),
                             int(n_components),
                             torch.from_numpy(np.log(startprob)),
                             torch.from_numpy(np.log(transmat)),
                             torch.from_numpy(data),
                             torch.from_numpy(mask))
print("torch batch backward: \n{}".format(torch_bwdlattice))
Example no. 6
 def _do_backward_pass(self, framelogprob):
     # Older hmmlearn-style API: log start and transition probabilities are
     # cached on the model as _log_startprob / _log_transmat.
     n_observations, n_components = framelogprob.shape
     bwdlattice = np.zeros((n_observations, n_components))
     _hmmc._backward(n_observations, n_components, self._log_startprob,
                     self._log_transmat, framelogprob, bwdlattice)
     return bwdlattice