def __init__(self, n, m, d=1, A=None, means=None, covars=None, w=None,
             pi=None, min_std=0.01, init_type='uniform',
             precision=numpy.double, verbose=False):
    """Construct a new Continuous HMM.

    To initialize the model with custom parameters, pass values for
    (A, means, covars, w, pi) and set ``init_type`` to 'user'. The default
    initialization draws all probabilities from a uniform distribution and
    is not recommended.
    """
    _BaseHMM.__init__(self, n, m, precision, verbose)  # @UndefinedVariable
    # Store the user-supplied (possibly None) model parameters; reset()
    # below fills in anything missing according to init_type.
    self.d = d
    self.min_std = min_std
    self.A = A
    self.pi = pi
    self.w = w
    self.means = means
    self.covars = covars
    self.reset(init_type=init_type)
def __init__(self, n, m, d=1, A=None, means=None, covars=None, w=None,
             pi=None, min_std=0.01, init_type='uniform',
             precision=numpy.double, verbose=False):
    """Construct a new Continuous HMM.

    To initialize the model with custom parameters, pass values for
    (A, means, covars, w, pi) and set ``init_type`` to 'user'. The default
    initialization draws all probabilities from a uniform distribution and
    is not recommended.
    """
    _BaseHMM.__init__(self, n, m, precision, verbose)  # @UndefinedVariable
    self.d = d
    self.min_std = min_std
    self.A = A
    self.pi = pi
    self.w = w
    self.means = means
    self.covars = covars
    self.reset(init_type=init_type)
    # Flag to load some decoding info from cached files (for example the
    # bMap and the durationLookup table); makes decoding faster.
    self.usePersistentFiles = True
    self.logger = logging.getLogger(__name__)
    self.logger.setLevel(logging.DEBUG)
def __init__(self, n, m, d=1, A=None, means=None, covars=None, w=None,
             pi=None, min_std=0.01, init_type="uniform",
             precision=numpy.double, verbose=False):
    """Construct a new Continuous HMM.

    Custom parameters may be supplied via (A, means, covars, w, pi)
    together with ``init_type='user'``. The default 'uniform' mode
    initializes every probability from a uniform distribution, which is
    not recommended.
    """
    _BaseHMM.__init__(self, n, m, precision, verbose)  # @UndefinedVariable
    # Keep whatever the caller handed in; reset() completes the
    # initialization according to init_type.
    self.min_std = min_std
    self.d = d
    self.means = means
    self.covars = covars
    self.w = w
    self.A = A
    self.pi = pi
    self.reset(init_type=init_type)
def __init__(self, n, m, d=1, A=None, means=None, covars=None, w=None,
             pi=None, min_std=0.01, init_type='uniform',
             precision=numpy.double, verbose=False):
    """Construct a new Continuous HMM.

    To initialize the model with custom parameters, pass values for
    (A, means, covars, w, pi) and set ``init_type`` to 'user'. The default
    initialization draws all probabilities from a uniform distribution and
    is not recommended.
    """
    _BaseHMM.__init__(self, n, m, precision, verbose)  # @UndefinedVariable
    self.d = d
    self.min_std = min_std
    self.A = A
    self.pi = pi
    self.w = w
    self.means = means
    self.covars = covars
    self.reset(init_type=init_type)
    # Flag to load some decoding info from cached files (for example the
    # bMap and the durationLookup table); makes decoding faster.
    # Disabled by default here.
    self.usePersistentFiles = False
def __init__(self, number_outputs, n, locations_per_segment, start_t, end_t,
             number_latent_forces=1, precision=np.double, verbose=False):
    """Construct an HMM whose hidden states emit through second-order
    latent force models (LFMs).

    Parameters
    ----------
    number_outputs : int
        Number of observed output dimensions per segment.
    n : int
        Number of hidden states.
    locations_per_segment : int
        Number of sample locations per segment, spread evenly over
        [start_t, end_t].
    start_t, end_t :
        Time interval covered by each segment.
    number_latent_forces : int, optional
        Number of latent forces driving each LFM (default 1).
    precision : optional
        Numeric precision passed to the base HMM.
    verbose : bool, optional
        Verbosity flag passed to the base HMM.
    """
    assert n > 0
    assert locations_per_segment > 0
    assert number_outputs > 0
    # isinstance is the idiomatic type check; the previous
    # `type(number_outputs) is type(1)` wrongly rejected int subclasses.
    assert isinstance(number_outputs, int)
    self.n = n  # number of hidden states
    self.number_outputs = number_outputs
    self.start_t = start_t
    self.end_t = end_t
    self.sample_locations = np.linspace(start_t, end_t, locations_per_segment)
    self.locations_per_segment = locations_per_segment
    # Pool of workers to perform parallel computations when needed.
    self.pool = mp.Pool()
    # Covariance memoization cache.
    self.memo_covs = {}
    self.number_latent_f = number_latent_forces
    # Initially no LFM params.
    self.LFMparams = {}
    # One latent force model object per hidden state, all sharing the
    # same sample locations.
    self.lfms = np.zeros(n, dtype='object')
    for i in xrange(n):
        self.lfms[i] = lfm2(self.number_latent_f, number_outputs)
        self.lfms[i].set_inputs_with_same_ind(self.sample_locations)
    self._INDEPENDENT_OUTPUTS = False
    _BaseHMM.__init__(self, n, None, precision, verbose)
    self.reset()
def __init__(self, number_outputs, n, locations_per_segment, start_t, end_t,
             precision=np.double, verbose=False):
    """Construct an HMM whose hidden states emit through intrinsic
    coregionalization models (ICMs).

    Parameters
    ----------
    number_outputs : int
        Number of observed output dimensions per segment.
    n : int
        Number of hidden states.
    locations_per_segment : int
        Number of sample locations per segment, spread evenly over
        [start_t, end_t].
    start_t, end_t :
        Time interval covered by each segment.
    precision : optional
        Numeric precision passed to the base HMM.
    verbose : bool, optional
        Verbosity flag passed to the base HMM.
    """
    assert n > 0
    assert locations_per_segment > 0
    assert number_outputs > 0
    # isinstance is the idiomatic type check; the previous
    # `type(number_outputs) is type(1)` wrongly rejected int subclasses.
    assert isinstance(number_outputs, int)
    self.n = n  # number of hidden states
    self.number_outputs = number_outputs
    self.start_t = start_t
    self.end_t = end_t
    self.sample_locations = np.linspace(start_t, end_t, locations_per_segment)
    self.locations_per_segment = locations_per_segment
    # Pool of workers to perform parallel computations when needed.
    self.pool = mp.Pool()
    # Covariance memoization cache.
    self.memo_covs = {}
    self.ICMparams = {}
    # One ICM object per hidden state, all sharing the same sample
    # locations.
    self.icms = np.zeros(n, dtype='object')
    for i in xrange(n):
        self.icms[i] = icm(number_outputs, self.sample_locations)
    _BaseHMM.__init__(self, n, None, precision, verbose)
    self.reset()
def __init__(self, n, m, A=None, B=None, pi=None, init_type='uniform',
             precision=numpy.double, verbose=False):
    """Construct a new Discrete HMM.

    To initialize the model with custom parameters, pass values for
    (A, B, pi) and set ``init_type`` to 'user'. The default initialization
    draws all probabilities from a uniform distribution and is not
    recommended.
    """
    _BaseHMM.__init__(self, n, m, precision, verbose)  # @UndefinedVariable
    # Store caller-supplied (possibly None) parameters; reset() fills in
    # anything missing according to init_type.
    self.A = A
    self.B = B
    self.pi = pi
    self.reset(init_type=init_type)
def __init__(self, n, m, A=None, B=None, pi=None, init_type='uniform',
             precision=numpy.double, verbose=False):
    """Construct a new Discrete HMM.

    Custom parameters may be supplied via (A, B, pi) together with
    ``init_type='user'``. The default 'uniform' mode initializes every
    probability from a uniform distribution, which is not recommended.
    """
    _BaseHMM.__init__(self, n, m, precision, verbose)  # @UndefinedVariable
    self.pi = pi
    self.B = B
    self.A = A
    self.reset(init_type=init_type)