def __init__(self, species_name=None, block_size=1,
             transfer_emissions=np.array([0.1]), transfer_rate=1e-2,
             clonal_emission=1e-3, transfer_length=5e2,
             transition_prior=None, algorithm="viterbi", n_iter=10,
             tol=1e-2, verbose=False, params="m"):
    if species_name is not None:
        self.transfer_emissions, self.transition_prior = \
            self.get_empirical_emissions(species_name, block_size)
    else:
        self._init_emissions_manual(transfer_emissions, transition_prior)
    n_components = 1 + len(self.transfer_emissions)
    # Normalize the transition prior so it sums to one.
    self.transition_prior = (self.transition_prior.astype(np.float32)
                             / np.sum(self.transition_prior))
    self.transfer_rate = transfer_rate
    self.clonal_emission = clonal_emission
    self.exit_rate = 1. / transfer_length  # rate of leaving the transferred state
    self.all_emissions = np.concatenate([[self.clonal_emission],
                                         self.transfer_emissions])
    _BaseHMM.__init__(self, n_components, algorithm=algorithm,
                      n_iter=n_iter, tol=tol, verbose=verbose, params=params)
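# Hedged usage sketch for the constructor above. The enclosing class name is
# not shown in this snippet, so `TransferHMM` is a hypothetical stand-in; only
# the keyword arguments mirror the real signature. This exercises the
# manual-emissions path (species_name=None); transition_prior is normalized
# inside __init__, so it need not sum to one.
import numpy as np

model = TransferHMM(
    transfer_emissions=np.array([0.05, 0.2]),  # one rate per transferred state
    transition_prior=np.array([1.0, 1.0]),     # normalized internally
    transfer_rate=1e-2,
    transfer_length=5e2,                       # exit_rate = 1 / transfer_length
)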
def __init__(self, n_components=1, startprob=None, transmat=None,
             startprob_prior=None, transmat_prior=None,
             algorithm="viterbi", random_state=None, n_iter=10,
             thresh=1e-2, params=string.ascii_letters,
             init_params=string.ascii_letters):
    """Create a hidden Markov model with multinomial emissions.

    Parameters
    ----------
    n_components : int
        Number of states.
    """
    _BaseHMM.__init__(self, n_components, startprob, transmat,
                      startprob_prior=startprob_prior,
                      transmat_prior=transmat_prior,
                      algorithm=algorithm, random_state=random_state,
                      n_iter=n_iter, thresh=thresh, params=params,
                      init_params=init_params)
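# Minimal sketch for the multinomial constructor above, assuming the class is
# named `MultinomialHMM` (the docstring names the emission type, not the
# class). Note the legacy API: startprob/transmat are passed positionally and
# `thresh`, not `tol`, sets the EM convergence threshold.
import numpy as np

model = MultinomialHMM(
    n_components=2,
    startprob=np.array([0.6, 0.4]),
    transmat=np.array([[0.7, 0.3],
                       [0.4, 0.6]]),
)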
def __init__(self, mlp, aucoustic_model, observation_count, n_components=1,
             startprob_prior=1.0, transmat_prior=1.0, algorithm="viterbi",
             random_state=None, n_iter=10, tol=1e-2, verbose=False,
             params="stmc", init_params="stmc"):
    _BaseHMM.__init__(self, n_components,
                      startprob_prior=startprob_prior,
                      transmat_prior=transmat_prior,
                      algorithm=algorithm, random_state=random_state,
                      n_iter=n_iter, tol=tol, params=params,
                      verbose=verbose, init_params=init_params)
    self.aucoustic_model = aucoustic_model
    self.observation_count = observation_count
    self.mlp = mlp
    self.mlp.info()
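# Hypothetical construction of the hybrid model above; `HybridMLPHMM` and the
# dummy network are assumptions. The only hard requirement visible in the
# constructor is that `mlp` exposes an `info()` method, which is called once
# at construction time (keyword spelling `aucoustic_model` follows the
# signature above).
class DummyMLP:
    def info(self):
        print("dummy network summary")

model = HybridMLPHMM(DummyMLP(), aucoustic_model=None,
                     observation_count=39, n_components=3)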
def __init__(self, n_components: int = 1, startprob_prior: float = 1.0,
             transmat_prior: float = 1.0,
             diffusion_degrees_of_freedom: int = 4, tau: float = 0.02,
             algorithm: str = "viterbi", random_state: int = 42,
             n_iter: int = 10, tol: float = 1e-2, verbose: bool = False,
             params: str = "std", init_params: str = "std"):
    """
    Parameters
    ----------
    n_components : int, optional
        Number of states. The default is 1.
    startprob_prior : float, optional
        Parameter of the Dirichlet prior distribution for startprob_.
        The default is 1.0.
    transmat_prior : float, optional
        Parameter of the Dirichlet prior distribution for each row of
        the transition probabilities transmat_. The default is 1.0.
    diffusion_degrees_of_freedom : int, optional
        Translational degrees of freedom. The default is 4.
    tau : float, optional
        Time interval between two consecutive measurements. The default
        is 0.02.
    algorithm : str, optional
        Decoder algorithm. The default is "viterbi".
    random_state : int, optional
        Seed for the random number generator. The default is 42.
    n_iter : int, optional
        Maximum number of iterations to perform. The default is 10.
    tol : float, optional
        Convergence threshold. EM will stop if the gain in
        log-likelihood is below this value. The default is 1e-2.
    verbose : bool, optional
        Whether per-iteration convergence reports are printed to
        sys.stderr. Convergence can also be diagnosed using the
        monitor_ attribute. The default is False.
    params : str, optional
        The parameters that get updated during training. Can contain
        any combination of 's' for startprob, 't' for transmat, and
        'd' for diffusion coefficients. The default is "std" (all
        parameters).
    init_params : str, optional
        The parameters that get initialized prior to training. Can
        contain any combination of 's' for startprob, 't' for transmat,
        and 'd' for diffusion coefficients. The default is "std" (all
        parameters).

    Returns
    -------
    None.
    """
    _BaseHMM.__init__(self, n_components=n_components,
                      startprob_prior=startprob_prior,
                      transmat_prior=transmat_prior,
                      algorithm=algorithm, random_state=random_state,
                      n_iter=n_iter, tol=tol, params=params,
                      verbose=verbose, init_params=init_params)
    self._tau_ = tau
    self._diffusion_degrees_of_freedom_ = diffusion_degrees_of_freedom
    self._diffusion_coefficients_ = np.zeros([self.n_components])
    self._mu_ = np.zeros([self.n_components])
    self.n_features = 1
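# Usage sketch for the diffusion HMM above; `DiffusionHMM` is a hypothetical
# class name. Since the constructor fixes n_features to 1, observations are
# assumed to be a single column (e.g. per-step displacement statistics)
# sampled every tau seconds.
model = DiffusionHMM(n_components=2, tau=0.02,
                     diffusion_degrees_of_freedom=4,
                     n_iter=50, tol=1e-3, random_state=42)
# X = measurements.reshape(-1, 1)   # shape (n_samples, 1)
# model.fit(X)
# states = model.predict(X)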
def __init__(self, n_components=2, startprob_prior=1.0, transmat_prior=1.0,
             algorithm="viterbi", random_state=None, n_iter=10, tol=1e-2,
             verbose=False, params="str", init_params="str"):
    _BaseHMM.__init__(self, n_components,
                      startprob_prior=startprob_prior,
                      transmat_prior=transmat_prior,
                      algorithm=algorithm, random_state=random_state,
                      n_iter=n_iter, tol=tol, verbose=verbose,
                      params=params, init_params=init_params)
def __init__(self, n_components=1, startprob_prior=1.0, transmat_prior=1.0,
             algorithm='viterbi', random_state=None, n_iter=10, tol=1e-2,
             verbose=False, params="ste", init_params="ste"):
    _BaseHMM.__init__(self, n_components,
                      startprob_prior=startprob_prior,
                      transmat_prior=transmat_prior,
                      algorithm=algorithm, random_state=random_state,
                      n_iter=n_iter, tol=tol, verbose=verbose,
                      params=params, init_params=init_params)
def __init__(self, n_components=1, loc_covariance_type='full',
             time_covariance_type='full', loc_min_covar=1e-3,
             time_min_covar=1e-3, startprob_prior=1.0, transmat_prior=1.0,
             loc_means_prior=0, loc_means_weight=0, loc_covars_prior=1e-2,
             loc_covars_weight=1, time_means_prior=0, time_means_weight=0,
             time_covars_prior=1e-2, time_covars_weight=1,
             algorithm="viterbi", random_state=None, n_iter=10, tol=1e-2,
             verbose=False, params="stmc", init_params="stmc",
             weights=None):
    _BaseHMM.__init__(self, n_components,
                      startprob_prior=startprob_prior,
                      transmat_prior=transmat_prior,
                      algorithm=algorithm, random_state=random_state,
                      n_iter=n_iter, tol=tol, params=params,
                      verbose=verbose, init_params=init_params)
    self.loc_covariance_type = loc_covariance_type
    self.time_covariance_type = time_covariance_type
    self.loc_min_covar = loc_min_covar
    self.time_min_covar = time_min_covar
    self.loc_means_prior = loc_means_prior
    self.time_means_prior = time_means_prior
    self.loc_means_weight = loc_means_weight
    self.time_means_weight = time_means_weight
    self.loc_covars_prior = loc_covars_prior
    self.time_covars_prior = time_covars_prior
    self.loc_covars_weight = loc_covars_weight
    self.time_covars_weight = time_covars_weight
    self._weights = weights
    self.X_loc = []
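# Hypothetical instantiation of the joint location/time model above;
# `LocTimeHMM` is an assumed name. Each emission parameter exists in a "loc"
# (spatial) and a "time" (temporal) variant, so the two observation streams
# can use different covariance structures and priors.
model = LocTimeHMM(
    n_components=4,
    loc_covariance_type='full',   # spatial emissions
    time_covariance_type='diag',  # temporal emissions
    loc_min_covar=1e-3,
    time_min_covar=1e-3,
)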
def __init__(self, n_components=1, n_mix=1, startprob=None, transmat=None,
             startprob_prior=None, transmat_prior=None, algorithm="viterbi",
             gmms=None, covariance_type='diag', covars_prior=1e-2,
             random_state=None, n_iter=10, thresh=1e-2,
             params=string.ascii_letters, init_params=string.ascii_letters):
    """Create a hidden Markov model with GMM emissions.

    Parameters
    ----------
    n_components : int
        Number of states.
    """
    _BaseHMM.__init__(self, n_components, startprob, transmat,
                      startprob_prior=startprob_prior,
                      transmat_prior=transmat_prior,
                      algorithm=algorithm, random_state=random_state,
                      n_iter=n_iter, thresh=thresh, params=params,
                      init_params=init_params)
    # XXX: Hotfix for n_mix, which is incompatible with scikit-learn's
    # BaseEstimator API.
    self.n_mix = n_mix
    self._covariance_type = covariance_type
    self.covars_prior = covars_prior
    self.gmms = gmms
    if gmms is None:
        # Build one GMM per hidden state when none are supplied.
        gmms = []
        for x in range(self.n_components):
            if covariance_type is None:
                g = GMM(n_mix)
            else:
                g = GMM(n_mix, covariance_type=covariance_type)
            gmms.append(g)
    self.gmms_ = gmms
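# Sketch for the GMM-emission constructor above, assuming the class is named
# `GMMHMM` as in old hmmlearn releases. With gmms=None (the default), the
# constructor builds one GMM with n_mix components per hidden state, so only
# the counts need to be supplied.
model = GMMHMM(n_components=3, n_mix=2, covariance_type='diag')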
def __init__(self, n_components=1, covariance_type='diag', startprob=None,
             transmat=None, startprob_prior=None, transmat_prior=None,
             algorithm="viterbi", means_prior=0, means_weight=0,
             covars_prior=1e-2, covars_weight=1, random_state=None,
             n_iter=10, thresh=1e-2, params=string.ascii_letters,
             init_params=string.ascii_letters):
    _BaseHMM.__init__(self, n_components, startprob, transmat,
                      startprob_prior=startprob_prior,
                      transmat_prior=transmat_prior,
                      algorithm=algorithm, random_state=random_state,
                      n_iter=n_iter, thresh=thresh, params=params,
                      init_params=init_params)
    self._covariance_type = covariance_type
    if covariance_type not in ['spherical', 'tied', 'diag', 'full']:
        raise ValueError("covariance_type must be one of "
                         "'spherical', 'tied', 'diag', 'full'")
    self.means_prior = means_prior
    self.means_weight = means_weight
    self.covars_prior = covars_prior
    self.covars_weight = covars_weight
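# Sketch for the Gaussian-emission constructor above; `GaussianHMM` is an
# assumed class name matching the legacy positional startprob/transmat API.
# Unsupported covariance_type values raise ValueError at construction time.
model = GaussianHMM(n_components=2, covariance_type='full')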
def __init__(self, framelogprob, rates, M, *args, **kwargs):
    _BaseHMM.__init__(self, *args, **kwargs)
    # Rates for each state.
    self.rates = rates
    self.M = M
    self.framelogprob = framelogprob
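# Hypothetical construction of the precomputed-likelihood model above;
# `PrecomputedHMM` is an assumed name. `framelogprob` is presumably a
# (n_samples, n_components) array of per-frame log emission probabilities,
# with `rates` holding one rate per state and `M` a model-specific constant.
import numpy as np

framelogprob = np.log(np.full((100, 2), 0.5))  # toy uniform likelihoods
model = PrecomputedHMM(framelogprob, rates=np.array([0.1, 1.0]), M=10,
                       n_components=2)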