Example #1
 def compute_post(self):
     # Log-posterior of the proposed theta: log-prior plus the particle
     # filter's unbiased estimate of the log-likelihood.
     self.prop.lpost[0] = self.prior.logpdf(self.prop.theta)
     if np.isfinite(self.prop.lpost[0]):
         # Turn the theta record into keyword arguments for the ssm.
         ssm_kwargs_dict = ssp.rec_to_dict(self.prop.theta[0])
         pf = self.alg_instance(ssm_kwargs_dict)
         pf.run()
         self.prop_history = pf.hist  # store SMC sampler history
         self.prop.lpost[0] += pf.logLt
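
All of these examples pass one record of a NumPy structured parameter array through rec_to_dict before handing it to a model class. A minimal sketch of that conversion, with illustrative field names (in particles, rec_to_dict boils down to dict(zip(rec.dtype.names, rec))):

import numpy as np

# Hypothetical parameter array with two fields, mu and sigma.
theta = np.array([(0.5, 1.2)], dtype=[('mu', float), ('sigma', float)])
rec = theta[0]
kwargs = dict(zip(rec.dtype.names, rec))  # what rec_to_dict returns
print(kwargs)  # {'mu': 0.5, 'sigma': 1.2} (up to NumPy scalar types)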
Example #2
 def loglik(self, theta, t=None):
     # Note: for simplicity we ignore argument t here,
     # and compute the full log-likelihood.
     ll = np.zeros(theta.shape[0])
     for n, th in enumerate(theta):
         # The model is linear Gaussian, so the likelihood is exact:
         # one Kalman filter pass per parameter record.
         mod = ReparamLinGauss(**smc_samplers.rec_to_dict(th))
         exact = mod.kalman_filter(data)
         ll[n] = np.sum(exact.logpyts)
     return ll
Example #3
    def compute_post(self):
        # Log-posterior: log-prior plus the particle filter's estimate
        # of the log-likelihood.
        self.prop.lpost[0] = self.prior.logpdf(self.prop.theta)
        if np.isfinite(self.prop.lpost[0]):
            pf = self.alg_instance(ssp.rec_to_dict(self.prop.theta[0]))
            pf.run()
            self.prop.lpost[0] += pf.logLt

            if self.keep_states:
                # Alternative: store the filtering means instead.
                # self.states.append(np.array([m['mean'] for m in pf.summaries.moments]))
                self.states.append(np.array(pf.hist.extract_one_trajectory()))
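
extract_one_trajectory draws one ancestral trajectory from the stored particle history; this requires running the filter with history storage enabled. A minimal sketch, assuming particles' SMC driver, the Bootstrap Feynman-Kac class and an illustrative model choice:

import particles
from particles import state_space_models as ssms

my_ssm = ssms.StochVol()              # illustrative model choice
x, y = my_ssm.simulate(50)            # simulate 50 observations
fk = ssms.Bootstrap(ssm=my_ssm, data=y)
pf = particles.SMC(fk=fk, N=100, store_history=True)
pf.run()
traj = pf.hist.extract_one_trajectory()  # one sampled state trajectory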
Example #4
 def update_states(self, theta, x):
     # If no previous trajectory is available, draw one from the model.
     if x is None:
         new_x, _ = ssm_cls(**smc_samplers.rec_to_dict(theta)).simulate(
             self.T)
     else:
         new_x = x[:]
     xa = np.array(new_x).flatten()
     # Preconditioned Langevin-type proposal for the whole trajectory.
     m = np.matmul(self.A, self.tod * xa + self.grad_log_lik(xa))
     xp = stats.multivariate_normal.rvs(mean=m, cov=self.cov)
     # Metropolis-Hastings log acceptance ratio; h is the log proposal term.
     mh_log_ratio = (self.log_lik(xp) - self.log_lik(xa) - self.h(xp, xa) +
                     self.h(xa, xp))
     if np.log(random.uniform()) <= mh_log_ratio:
         self.nacc += 1
         return list(xp)
     else:
         return new_x
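
The move above is a Metropolis-adjusted Langevin (MALA) step on the latent states, with self.A and self.cov acting as a preconditioner. A self-contained toy version of the same accept/reject structure, for a standard normal target (all names here are illustrative):

import numpy as np
from numpy import random
from scipy import stats

def mala_step(x, step=0.5):
    # Langevin proposal around x; the gradient of log N(0, 1) is -x.
    m = x + 0.5 * step * (-x)
    xp = stats.norm.rvs(loc=m, scale=np.sqrt(step))
    mp = xp + 0.5 * step * (-xp)
    # Target log-ratio plus the asymmetric-proposal correction.
    log_ratio = (stats.norm.logpdf(xp) - stats.norm.logpdf(x)
                 + stats.norm.logpdf(x, loc=mp, scale=np.sqrt(step))
                 - stats.norm.logpdf(xp, loc=m, scale=np.sqrt(step)))
    return xp if np.log(random.uniform()) <= log_ratio else x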
Example #5
 def update_states(self, theta, x):
     dt = smc_samplers.rec_to_dict(theta)
     T = len(self.data)
     if x is None:
         new_x, _ = ssm_cls(**dt).simulate(T)
     else:
         new_x = x[:]
     mu, sigma, rho = dt['mu'], dt['sigma'], dt['rho']
     # Full conditional of x_t in the AR(1) prior, given both neighbours:
     # the precision is (1 + rho^2) / sigma^2, hence the constants below.
     a = 1. + rho**2
     sp = sigma / np.sqrt(a)
     c = rho / a
     for t, yt in enumerate(self.data):
         if t == 0:
             # End points have a single neighbour.
             m = mu + rho * (new_x[1] - mu)
             s = sigma
         elif t == T - 1:
             m = mu + rho * (new_x[-2] - mu)
             s = sigma
         else:
             m = mu + c * (new_x[t - 1] + new_x[t + 1] - 2. * mu)
             s = sp
         # Sample x_t from its full conditional given observation yt;
         # reject_sv handles the likelihood term by rejection.
         new_x[t] = reject_sv(m, s, yt)
     return new_x
Example #6
 def fk_mod(self, theta):
     # Build the Feynman-Kac model associated with one parameter record.
     ssm = self.ssm_cls(**ssp.rec_to_dict(theta))
     return self.fk_cls(ssm=ssm, data=self.data)
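
A self-contained sketch of what such a factory amounts to, assuming particles' StochVol model and Bootstrap Feynman-Kac class stand in for self.ssm_cls and self.fk_cls (parameter values are illustrative):

import numpy as np
import particles
from particles import state_space_models as ssms

theta = np.array([(-1., 0.9, 1.)],
                 dtype=[('mu', float), ('rho', float), ('sigma', float)])
_, data = ssms.StochVol().simulate(50)
ssm = ssms.StochVol(**dict(zip(theta[0].dtype.names, theta[0])))
fk = ssms.Bootstrap(ssm=ssm, data=data)
pf = particles.SMC(fk=fk, N=100)
pf.run()
print(pf.logLt)  # log-likelihood estimate for this theta record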
Example #7
 def compute_post(self):
     # Same pattern as Example #1, without keeping the filter history.
     self.prop.lpost[0] = self.prior.logpdf(self.prop.theta)
     if np.isfinite(self.prop.lpost[0]):
         pf = self.alg_instance(ssp.rec_to_dict(self.prop.theta[0]))
         pf.run()
         self.prop.lpost[0] += pf.logLt
Example #8
                           Nx=Nx, niter=niter, adaptive=False, rw_cov=rw_cov, 
                           verbose=10)

# Run the algorithms 
####################

for alg_name, alg in algos.items(): 
    print('\nRunning ' + alg_name)
    alg.run()
    print('CPU time: %.2f min' % (alg.cpu_time / 60))

# Compute variances 
###################
thin = int(niter / 100)  # compute average (of variances) over 100 points
thetas = algos['mh'].chain.theta[(burnin - 1)::thin]
fks = {k: ssm.Bootstrap(ssm=ReparamLinGauss(**smc_samplers.rec_to_dict(th)),
                        data=data)
       for k, th in enumerate(thetas)}
outf = lambda pf: pf.logLt
print('Computing variances of log-lik estimates as a function of N')
results = particles.multiSMC(fk=fks, N=Nxs, nruns=4, nprocs=0, out_func=outf)
df = pandas.DataFrame(results)
df_var = df.groupby(['fk', 'N']).var()  # variance as a function of fk and N
df_var = df_var.reset_index()
df_var_mean = df_var.groupby('N').mean()  # mean variance as function of N

# Plots
#######
savefigs = False
plt.style.use('ggplot')

def msjd(theta):
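    # The snippet is truncated here. msjd usually stands for mean squared
    # jump distance; a plausible completion (an assumption, not the original
    # code) sums squared jumps over the fields of the structured chain:
    s = 0.
    for p in theta.dtype.names:
        s += np.sum(np.diff(theta[p], axis=0) ** 2)
    return s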
Example #9
                           verbose=10)

# Run the algorithms
####################

for alg_name, alg in algos.items():
    print('\nRunning ' + alg_name)
    alg.run()
    print('CPU time: %.2f min' % (alg.cpu_time / 60))

# Compute variances
###################
thin = int(niter / 100)  # compute average (of variances) over 100 points
thetas = algos['mh'].chain.theta[(burnin - 1)::thin]
fks = {
    k: ssms.Bootstrap(ssm=ReparamLinGauss(**smc_samplers.rec_to_dict(th)),
                      data=data)
    for k, th in enumerate(thetas)
}
outf = lambda pf: pf.logLt
print('Computing variances of log-lik estimates as a function of N')
results = particles.multiSMC(fk=fks, N=Nxs, nruns=4, nprocs=0, out_func=outf)
df = pandas.DataFrame(results)
df_var = df.groupby(['fk', 'N']).var()  # variance as a function of fk and N
df_var = df_var.reset_index()
df_var_mean = df_var.groupby('N').mean()  # mean variance as function of N

# Plots
#######
savefigs = True  # False if you don't want to save plots as pdfs
plt.style.use('ggplot')
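
Both scripts stop before the actual plotting code. A hedged sketch of how the computed df_var_mean could be visualised (the figure file name is hypothetical):

fig, ax = plt.subplots()
df_var_mean.plot(ax=ax, logx=True, logy=True)  # one line per numeric column;
                                               # pick out the log-lik one as needed
ax.set_xlabel('N')
ax.set_ylabel('mean variance of log-lik estimates')
if savefigs:
    plt.savefig('loglik_var_vs_N.pdf')  # hypothetical file name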