import numpy as np
from sklearn.hmm import MultinomialHMM  # the "ugly sklearn" dependency; sklearn.hmm was later removed in favour of hmmlearn

def __call__(self):
    hmmdata = dict()

    for (name, tracks, probs, objids, coords) in \
            self.dtable.iterby(self.ecopts.sortby, True):
        if tracks is probs is None:
            hmmdata[name] = None
            continue

        labelmapper = LabelMapper(np.unique(tracks),
                                  self.classdef.class_names.keys())

        # np.unique returns a sorted ndarray
        idx = labelmapper.index_from_classdef(np.unique(tracks))
        idx.sort()

        # restrict the probabilities to the classes that actually occur;
        # if no prediction probabilities are available, probs stays None
        if probs is not None:
            probs = probs[:, :, idx]
        est = self._get_estimator(probs, labelmapper.label2index(tracks))
        est.constrain(self.hmmc(est, labelmapper))

        # ugly sklearn: parameters have to be poked in as trailing-underscore attributes
        hmm_ = MultinomialHMM(n_components=est.nstates)
        hmm_.startprob_ = est.startprob
        hmm_.transmat_ = est.trans
        hmm_.emissionprob_ = est.emis

        # Viterbi-decode each track, then map state indices back to class labels
        tracks2 = []
        for track in labelmapper.label2index(tracks):
            tracks2.append(hmm_.predict(track))
        tracks2 = labelmapper.index2labels(np.array(tracks2, dtype=int))

        bucket = HmmBucket(tracks,
                           tracks2,
                           est.startprob,
                           est.emis,
                           est.trans,
                           self.dtable.groups(self.ecopts.sortby, name),
                           tracks.shape[0],
                           objids,
                           coords,
                           self.ecopts.timelapse)
        hmmdata[name] = bucket

    return hmmdata
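
# What hmm_.predict(track) does above is Viterbi decoding: given start,
# transition and emission probabilities, find the most likely hidden state
# sequence for one observed track. A minimal, library-free sketch in log
# space (the function name and the toy 2-state model below are illustrative
# assumptions, not part of the original code):

import numpy as np

def viterbi(startprob, trans, emis, obs):
    """Most likely state path for an integer observation sequence."""
    n_states = trans.shape[0]
    n_obs = len(obs)
    log_s, log_t, log_e = (np.log(p) for p in (startprob, trans, emis))

    # delta[t, i]: best log-probability of any path ending in state i at time t
    delta = np.empty((n_obs, n_states))
    backptr = np.zeros((n_obs, n_states), dtype=int)
    delta[0] = log_s + log_e[:, obs[0]]
    for t in range(1, n_obs):
        scores = delta[t - 1][:, None] + log_t          # scores[from, to]
        backptr[t] = scores.argmax(axis=0)
        delta[t] = scores.max(axis=0) + log_e[:, obs[t]]

    # follow the back-pointers from the best final state
    path = np.empty(n_obs, dtype=int)
    path[-1] = delta[-1].argmax()
    for t in range(n_obs - 2, -1, -1):
        path[t] = backptr[t + 1, path[t + 1]]
    return path

# toy model with sticky transitions: a noisy class sequence gets smoothed
startprob = np.array([0.9, 0.1])
trans = np.array([[0.95, 0.05],
                  [0.05, 0.95]])
emis = np.array([[0.9, 0.1],
                 [0.1, 0.9]])
print(viterbi(startprob, trans, emis, [0, 0, 1, 0, 1, 1, 1]))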
def __init__(self, states, estimator, tracks):
    # tracks have already been mapped to array indices
    super(HMMBaumWelchEstimator, self).__init__(states)

    # seed with the constrained estimates (overwritten below once fit() has run)
    self._trans = estimator.trans
    self._emis = estimator.emis
    self._startprob = estimator.startprob

    # The initialisation is essential! Baum-Welch only converges to a local
    # optimum, so it is seeded with the constrained estimates; init_params=""
    # stops fit() from re-initialising the parameters itself.
    hmm_ = MultinomialHMM(n_components=estimator.nstates,
                          transmat=estimator.trans,
                          startprob=estimator.startprob,
                          n_iter=1000,
                          init_params="")
    hmm_.emissionprob_ = estimator.emis

    # re-estimate transition, emission and start probabilities from the tracks
    hmm_.fit(tracks)
    self._trans = hmm_.transmat_
    self._emis = hmm_.emissionprob_
    self._startprob = hmm_.startprob_
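
# For context, the same seeded Baum-Welch fit against a current library: in
# hmmlearn >= 0.3 the categorical-emission model is called CategoricalHMM,
# and fit() expects one column of symbol indices plus per-sequence lengths.
# A minimal sketch under those assumptions (the toy tracks and parameter
# values are illustrative, not from the original code):

import numpy as np
from hmmlearn.hmm import CategoricalHMM

tracks = np.array([[0, 0, 1, 0, 1, 1],
                   [0, 1, 1, 1, 0, 1]])

model = CategoricalHMM(n_components=2, n_iter=1000, init_params="")
model.startprob_ = np.array([0.9, 0.1])                 # seed, not random
model.transmat_ = np.array([[0.95, 0.05], [0.05, 0.95]])
model.emissionprob_ = np.array([[0.9, 0.1], [0.1, 0.9]])

# hmmlearn takes concatenated sequences; lengths marks the track boundaries
X = tracks.reshape(-1, 1)
model.fit(X, lengths=[tracks.shape[1]] * len(tracks))
print(model.transmat_)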