def __init__(self, vars=None, out_vars=None, covariance=None, scale=1.,
             n_chains=100, tune=True, tune_interval=100, model=None,
             check_bound=True, likelihood_name='like', backend='csv',
             proposal_name='MultivariateNormal', **kwargs):
    """Initialize the parallel Metropolis step sampler.

    Parameters
    ----------
    vars : list of theano variables, optional
        Sampled random variables; defaults to all of ``model.vars``.
    out_vars : list of theano variables, optional
        Variables whose values are stored per sample; defaults to
        ``model.unobserved_RVs``. Must contain a variable named
        ``likelihood_name``.
    covariance : array_like, optional
        Proposal covariance; if None it is either estimated (for
        multivariate proposals) or set to a unit scale vector.
    scale : float
        Initial step-size scaling factor.
    n_chains : int
        Number of parallel chains.
    tune : bool
        Whether to adapt ``scaling`` during sampling.
    tune_interval : int
        Number of steps between scaling adaptations.
    model : pymc3 model, optional
        Taken from the model context if not given.
    check_bound : bool
        If True, evaluate the prior bound before the likelihood.
    likelihood_name : str
        Name of the likelihood variable within ``out_vars``.
    backend : str
        Storage backend identifier (e.g. 'csv').
    proposal_name : str
        Name of the proposal distribution to use.

    Raises
    ------
    ValueError
        If ``likelihood_name`` is not among the ``out_vars`` names
        (raised by ``list.index``).
    """
    model = modelcontext(model)

    if vars is None:
        vars = model.vars
    vars = inputvars(vars)

    if out_vars is None:
        out_vars = model.unobserved_RVs

    out_varnames = [out_var.name for out_var in out_vars]

    self.scaling = utility.scalar2floatX(num.atleast_1d(scale))

    self.tune = tune
    self.check_bound = check_bound
    self.tune_interval = tune_interval
    self.steps_until_tune = tune_interval

    # per-stage and global step counters
    self.stage_sample = 0
    self.cumulative_samples = 0
    self.accepted = 0

    # beta = 1 means sampling the untempered posterior
    self.beta = 1.
    self.stage = 0
    self.chain_index = 0

    # needed to use the same parallel implementation function as for SMC
    self.resampling_indexes = num.arange(n_chains)
    self.n_chains = n_chains

    self.likelihood_name = likelihood_name
    self._llk_index = out_varnames.index(likelihood_name)
    self.backend = backend

    # boolean mask marking which flattened dimensions are discrete
    self.discrete = num.concatenate(
        [[v.dtype in discrete_types] * (v.dsize or 1) for v in vars])
    self.any_discrete = self.discrete.any()
    self.all_discrete = self.discrete.all()

    # create initial population by drawing from the priors
    self.population = []
    self.array_population = num.zeros(n_chains)
    logger.info('Creating initial population for {}'
                ' chains ...'.format(self.n_chains))
    for _ in range(self.n_chains):
        self.population.append(
            Point({v.name: v.random() for v in vars}, model=model))

    # first chain starts at the model test point
    self.population[0] = model.test_point

    shared = make_shared_replacements(vars, model)
    self.logp_forw = logp_forw(out_vars, vars, shared)
    # compiled prior-bound check (distinct from the check_bound flag)
    self.check_bnd = logp_forw([model.varlogpt], vars, shared)

    super(Metropolis, self).__init__(vars, out_vars, shared)

    # init proposal
    if covariance is None and proposal_name in multivariate_proposals:
        t0 = time()
        self.covariance = init_proposal_covariance(
            bij=self.bij, vars=vars, model=model, pop_size=1000)
        t1 = time()
        logger.info('Time for proposal covariance init: %f' % (t1 - t0))
        scale = self.covariance
    elif covariance is None:
        scale = num.ones(sum(v.dsize for v in vars))
    else:
        scale = covariance

    self.proposal_name = proposal_name
    self.proposal_dist = choose_proposal(
        self.proposal_name, scale=scale)
    self.proposal_samples_array = self.proposal_dist(n_chains)

    # last evaluated likelihood point per chain (filled during sampling)
    self.chain_previous_lpoint = [[]] * self.n_chains
    self._tps = None
def astep(self, q0):
    """Perform one Metropolis step of a single chain.

    Parameters
    ----------
    q0 : array_like
        Current position of the chain in (flattened) parameter space.

    Returns
    -------
    tuple
        ``(q_new, l_new)`` — the (possibly unchanged) position and the
        corresponding forward-model / likelihood output vector.

    Raises
    ------
    ValueError
        If the likelihood of the starting point is not finite.
    """
    if self.stage == 0:
        # stage 0 only evaluates the starting point; no proposal is drawn
        l_new = self.logp_forw(q0)
        if not np.isfinite(l_new[self._llk_index]):
            raise ValueError(
                'Got NaN in likelihood evaluation! '
                'Invalid model definition?')
        q_new = q0
    else:
        if self.stage_sample == 0:
            # draw all proposal steps for this stage at once
            # NOTE(review): self.n_steps is assigned outside __init__ —
            # confirm it is set before stage > 0 sampling starts.
            self.proposal_samples_array = self.proposal_dist(
                self.n_steps).astype(theano.config.floatX)

        if not self.steps_until_tune and self.tune:
            # Tune scaling parameter
            logger.debug('Tuning: Chain_%i step_%i' % (
                self.chain_index, self.stage_sample))
            self.scaling = utility.scalar2floatX(
                pm.metropolis.tune(
                    self.scaling,
                    self.accepted / float(self.tune_interval)))
            # Reset counter
            self.steps_until_tune = self.tune_interval
            self.accepted = 0

        logger.debug(
            'Get delta: Chain_%i step_%i' % (
                self.chain_index, self.stage_sample))
        delta = self.proposal_samples_array[self.stage_sample, :] * \
            self.scaling

        if self.any_discrete:
            if self.all_discrete:
                delta = np.round(delta, 0)
                q0 = q0.astype(int)
                q = (q0 + delta).astype(int)
            else:
                # round only the discrete components; continuous ones
                # keep their proposed float values
                # FIX: the original then replaced q by q[self.discrete],
                # which dropped all continuous dimensions from the
                # proposal vector.
                delta[self.discrete] = np.round(
                    delta[self.discrete], 0).astype(int)
                q = q0 + delta
        else:
            q = q0 + delta

        l0 = self.chain_previous_lpoint[self.chain_index]

        # FIX: test the user flag check_bound; self.check_bnd is the
        # compiled bound-check function and is always truthy, which made
        # check_bound=False ineffective.
        if self.check_bound:
            logger.debug('Checking bound: Chain_%i step_%i' % (
                self.chain_index, self.stage_sample))
            varlogp = self.check_bnd(q)

            if np.isfinite(varlogp):
                logger.debug('Calc llk: Chain_%i step_%i' % (
                    self.chain_index, self.stage_sample))
                llk = self.logp_forw(q)
                logger.debug('Select llk: Chain_%i step_%i' % (
                    self.chain_index, self.stage_sample))
                q_new, accepted = pm.metropolis.metrop_select(
                    self.beta * (
                        llk[self._llk_index] - l0[self._llk_index]),
                    q, q0)

                if accepted:
                    logger.debug('Accepted: Chain_%i step_%i' % (
                        self.chain_index, self.stage_sample))
                    self.accepted += 1
                    l_new = llk
                    self.chain_previous_lpoint[self.chain_index] = l_new
                else:
                    logger.debug('Rejected: Chain_%i step_%i' % (
                        self.chain_index, self.stage_sample))
                    l_new = l0
            else:
                # proposal violates prior bounds: reject without
                # evaluating the (expensive) likelihood
                q_new = q0
                l_new = l0
        else:
            logger.debug('Calc llk: Chain_%i step_%i' % (
                self.chain_index, self.stage_sample))
            llk = self.logp_forw(q)
            logger.debug('Select: Chain_%i step_%i' % (
                self.chain_index, self.stage_sample))
            q_new, accepted = pm.metropolis.metrop_select(
                self.beta * (llk[self._llk_index] - l0[self._llk_index]),
                q, q0)

            if accepted:
                self.accepted += 1
                l_new = llk
                self.chain_previous_lpoint[self.chain_index] = l_new
            else:
                l_new = l0

        logger.debug(
            'Counters: Chain_%i step_%i' % (
                self.chain_index, self.stage_sample))
        self.steps_until_tune -= 1
        self.stage_sample += 1

        # reset sample counter
        if self.stage_sample == self.n_steps:
            self.stage_sample = 0

        logger.debug(
            'End step: Chain_%i step_%i' % (
                self.chain_index, self.stage_sample))

    return q_new, l_new
def astep(self, q0):
    """Perform one (tempered) Metropolis step of a single chain.

    Parameters
    ----------
    q0 : array_like
        Current position of the chain in (flattened) parameter space.

    Returns
    -------
    tuple
        ``(q_new, l_new)`` — the (possibly unchanged) position and the
        corresponding forward-model / likelihood output vector.

    Raises
    ------
    ValueError
        If the likelihood of the starting point is not finite.
    """
    if self.stage == 0:
        # stage 0 only evaluates the starting point; no proposal is drawn
        l_new = self.logp_forw(q0)
        if not num.isfinite(l_new[self._llk_index]):
            raise ValueError(
                'Got NaN in likelihood evaluation! '
                'Invalid model definition? '
                'Or starting point outside prior bounds!')
        q_new = q0
    else:
        if self.stage_sample == 0:
            # draw all proposal steps for this stage at once
            # NOTE(review): self.n_steps is assigned outside __init__ —
            # confirm it is set before stage > 0 sampling starts.
            self.proposal_samples_array = self.proposal_dist(
                self.n_steps).astype(tconfig.floatX)

        if not self.steps_until_tune and self.tune:
            # Tune scaling parameter
            logger.debug('Tuning: Chain_%i step_%i' % (
                self.chain_index, self.stage_sample))
            self.scaling = utility.scalar2floatX(
                step_tune(
                    self.scaling,
                    self.accepted / float(self.tune_interval)))
            # Reset counter
            self.steps_until_tune = self.tune_interval
            self.accepted = 0

        logger.debug(
            'Get delta: Chain_%i step_%i' % (
                self.chain_index, self.stage_sample))
        delta = self.proposal_samples_array[self.stage_sample, :] * \
            self.scaling

        if self.any_discrete:
            if self.all_discrete:
                delta = num.round(delta, 0)
                q0 = q0.astype(int)
                q = (q0 + delta).astype(int)
            else:
                # round only the discrete components; continuous ones
                # keep their proposed float values
                # FIX: the original then replaced q by q[self.discrete],
                # which dropped all continuous dimensions from the
                # proposal vector.
                delta[self.discrete] = num.round(
                    delta[self.discrete], 0).astype(int)
                q = q0 + delta
        else:
            q = q0 + delta

    try:
            l0 = self.chain_previous_lpoint[self.chain_index]
            llk0 = l0[self._llk_index]
    except IndexError:
            # no previous point stored for this chain yet: evaluate and
            # cache the current position
            l0 = self.logp_forw(q0)
            self.chain_previous_lpoint[self.chain_index] = l0
            llk0 = l0[self._llk_index]

    if self.check_bound:
            logger.debug('Checking bound: Chain_%i step_%i' % (
                self.chain_index, self.stage_sample))
            varlogp = self.check_bnd(q)

            if num.isfinite(varlogp):
                logger.debug('Calc llk: Chain_%i step_%i' % (
                    self.chain_index, self.stage_sample))
                lp = self.logp_forw(q)
                logger.debug('Select llk: Chain_%i step_%i' % (
                    self.chain_index, self.stage_sample))
                # temper the likelihood ratio with beta (llk0 is the
                # cached previous likelihood value)
                tempered_llk_ratio = self.beta * (
                    lp[self._llk_index] - llk0)
                q_new, accepted = metrop_select(
                    tempered_llk_ratio, q, q0)

                if accepted:
                    logger.debug('Accepted: Chain_%i step_%i' % (
                        self.chain_index, self.stage_sample))
                    logger.debug('proposed: %f previous: %f' % (
                        lp[self._llk_index], llk0))
                    self.accepted += 1
                    l_new = lp
                    self.chain_previous_lpoint[self.chain_index] = l_new
                else:
                    logger.debug('Rejected: Chain_%i step_%i' % (
                        self.chain_index, self.stage_sample))
                    logger.debug('proposed: %f previous: %f' % (
                        lp[self._llk_index], llk0))
                    l_new = l0
            else:
                # proposal violates prior bounds: reject without
                # evaluating the (expensive) likelihood
                q_new = q0
                l_new = l0
    else:
            logger.debug('Calc llk: Chain_%i step_%i' % (
                self.chain_index, self.stage_sample))
            lp = self.logp_forw(q)
            logger.debug('Select: Chain_%i step_%i' % (
                self.chain_index, self.stage_sample))
            q_new, accepted = metrop_select(
                self.beta * (lp[self._llk_index] - llk0), q, q0)

            if accepted:
                self.accepted += 1
                l_new = lp
                self.chain_previous_lpoint[self.chain_index] = l_new
            else:
                l_new = l0

    logger.debug(
            'Counters: Chain_%i step_%i' % (
                self.chain_index, self.stage_sample))
    self.steps_until_tune -= 1
    self.stage_sample += 1
    self.cumulative_samples += 1

    # reset sample counter
    if self.stage_sample == self.n_steps:
            self.stage_sample = 0

    logger.debug(
            'End step: Chain_%i step_%i' % (
                self.chain_index, self.stage_sample))

    return q_new, l_new
def __init__(self, vars=None, out_vars=None, covariance=None, scale=1.,
             n_chains=100, tune=True, tune_interval=100, model=None,
             check_bound=True, likelihood_name='like',
             proposal_name='MultivariateNormal', coef_variation=1.,
             **kwargs):
    """Initialize the SMC (Sequential Monte Carlo) step sampler.

    Parameters
    ----------
    vars : list of theano variables, optional
        Sampled random variables; defaults to all of ``model.vars``.
    out_vars : list of theano variables, optional
        Variables whose values are stored per sample; defaults to
        ``model.unobserved_RVs``. Must contain a variable named
        ``likelihood_name``.
    covariance : array_like, optional
        Proposal covariance; if None it is the identity (for the
        multivariate normal proposal) or a unit scale vector.
    scale : float
        Initial step-size scaling factor.
    n_chains : int
        Number of parallel chains (population size).
    tune : bool
        Whether to adapt ``scaling`` during sampling.
    tune_interval : int
        Number of steps between scaling adaptations.
    model : pymc3 model, optional
        Taken from the model context if not given.
    check_bound : bool
        If True, evaluate the prior bound before the likelihood.
    likelihood_name : str
        Name of the likelihood variable within ``out_vars``.
    proposal_name : str
        Name of the proposal distribution to use.
    coef_variation : float
        Target coefficient of variation for the tempering schedule.

    Raises
    ------
    ValueError
        If ``likelihood_name`` is not among the ``out_vars`` names
        (raised by ``list.index``).
    """
    model = modelcontext(model)

    if vars is None:
        vars = model.vars
    vars = inputvars(vars)

    if out_vars is None:
        out_vars = model.unobserved_RVs

    out_varnames = [out_var.name for out_var in out_vars]

    self.scaling = utility.scalar2floatX(np.atleast_1d(scale))

    if covariance is None and proposal_name == 'MultivariateNormal':
        self.covariance = np.eye(sum(v.dsize for v in vars))
        scale = self.covariance
    elif covariance is None:
        scale = np.ones(sum(v.dsize for v in vars))
    else:
        scale = covariance

    self.tune = tune
    # FIX: the flag was stored as self.check_bnd, which is clobbered
    # below by the compiled bound-check function — the user setting was
    # silently lost. Store the flag under its own name (consistent with
    # the Metropolis sampler).
    self.check_bound = check_bound
    self.tune_interval = tune_interval
    self.steps_until_tune = tune_interval

    self.proposal_name = proposal_name
    self.proposal_dist = choose_proposal(
        self.proposal_name, scale=scale)
    self.proposal_samples_array = self.proposal_dist(n_chains)

    self.stage_sample = 0
    self.accepted = 0

    # beta = 0 starts the tempering schedule at the prior
    self.beta = 0
    self.stage = 0
    self.chain_index = 0
    self.resampling_indexes = np.arange(n_chains)
    self.coef_variation = coef_variation
    self.n_chains = n_chains
    self.likelihoods = np.zeros(n_chains)

    self.likelihood_name = likelihood_name
    self._llk_index = out_varnames.index(likelihood_name)

    # boolean mask marking which flattened dimensions are discrete
    self.discrete = np.concatenate(
        [[v.dtype in discrete_types] * (v.dsize or 1) for v in vars])
    self.any_discrete = self.discrete.any()
    self.all_discrete = self.discrete.all()

    # create initial population by drawing from the priors
    self.population = []
    self.array_population = np.zeros(n_chains)
    for _ in range(self.n_chains):
        dummy = pm.Point(
            {v.name: v.random() for v in vars}, model=model)
        self.population.append(dummy)

    # first chain starts at the model test point
    self.population[0] = model.test_point
    self.chain_previous_lpoint = copy.deepcopy(self.population)

    shared = make_shared_replacements(vars, model)
    self.logp_forw = logp_forw(out_vars, vars, shared)
    # compiled prior-bound check (distinct from the check_bound flag)
    self.check_bnd = logp_forw([model.varlogpt], vars, shared)

    super(SMC, self).__init__(vars, out_vars, shared)
def starttime_min(self):
    """Minimum source start-time from the config, cast to theano floatX."""
    raw_value = self.config.starttime_min
    return scalar2floatX(raw_value, tconfig.floatX)
def duration_min(self):
    """Minimum source duration from the config, cast to theano floatX."""
    raw_value = self.config.duration_min
    return scalar2floatX(raw_value, tconfig.floatX)
def duration_sampling(self):
    """Sampling interval of the duration axis, cast to theano floatX."""
    raw_value = self.config.duration_sampling
    return scalar2floatX(raw_value, tconfig.floatX)
def starttime_sampling(self):
    """Sampling interval of the start-time axis, cast to theano floatX."""
    raw_value = self.config.starttime_sampling
    return scalar2floatX(raw_value, tconfig.floatX)