def adapt(self, mcmc_chain, step_output):
    # this is an extension of the base adapt call
    KameleonWindow.adapt(self, mcmc_chain, step_output)

    iter_no = mcmc_chain.iteration
    if iter_no > self.sample_discard and iter_no < self.stop_adapt:
        # decaying learning rate, so the adaptation diminishes over time
        learn_scale = 1.0 / sqrt(iter_no - self.sample_discard + 1.0)
        # update the scaling nu2 in the log domain (keeps it positive),
        # pushing the acceptance probability towards the target accstar
        self.nu2 = exp(log(self.nu2) + learn_scale * (exp(step_output.log_ratio) - self.accstar))
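# The adapt() update above is a stochastic-approximation (Robbins-Monro style)
# rule: the scaling nu2 is adjusted in the log domain with a decaying learning
# rate so that the acceptance probability drifts towards the target accstar.
# Below is a minimal, self-contained sketch of that update in isolation. The
# mapping from nu2 to acceptance probability is a made-up stand-in (in the
# sampler it comes from the actual Metropolis-Hastings step), so treat this as
# an illustration of the update rule only, not of the sampler's real dynamics.
from numpy import exp, log, sqrt

nu2 = 0.1                # initial scaling
accstar = 0.234          # target acceptance rate
sample_discard = 500

for iter_no in range(sample_discard + 1, 20000):
    # hypothetical model: larger nu2 -> lower acceptance probability
    acc_prob = min(1.0, 1.0 / (1.0 + 10.0 * nu2))

    # in adapt() the quantity (acc_prob) is exp(step_output.log_ratio)
    learn_scale = 1.0 / sqrt(iter_no - sample_discard + 1.0)
    nu2 = exp(log(nu2) + learn_scale * (acc_prob - accstar))

print("nu2 after adaptation: %.4f" % nu2)
print("acceptance under the stand-in model: %.4f" % min(1.0, 1.0 / (1.0 + 10.0 * nu2)))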
def main():
    # 8-dimensional banana-shaped target distribution
    distribution = Banana(dimension=8, bananicity=0.1, V=100.0)

    sigma = 5
    print "using sigma", sigma
    kernel = GaussianKernel(sigma=sigma)

    mcmc_sampler = KameleonWindow(distribution, kernel)

    start = zeros(distribution.dimension)
    mcmc_params = MCMCParams(start=start, num_iterations=80000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    # chain.append_mcmc_output(PlottingOutput(distribution, plot_from=3000))
    chain.append_mcmc_output(StatisticsOutput(plot_times=True))
    chain.run()

    # compare empirical quantiles of the samples against the target
    print distribution.emp_quantiles(chain.samples)
def __init__(self, distribution, kernel, nu2=0.1, gamma=None,
             sample_discard=500, num_samples_Z=1000, stop_adapt=20000, accstar=0.234):
    KameleonWindow.__init__(self, distribution, kernel, nu2, gamma,
                            sample_discard, num_samples_Z, stop_adapt)

    # target acceptance rate towards which nu2 is adapted
    self.accstar = accstar
def __str__(self):
    s = self.__class__.__name__ + "=["
    s += "accstar=" + str(self.accstar)
    s += ", " + KameleonWindow.__str__(self)
    s += "]"
    return s
def __init__(self, distribution, kernel, nu2=0.1, gamma=0.1,
             sample_discard=500, num_samples_Z=1000, stop_adapt=20000, accstar=0.234):
    KameleonWindow.__init__(self, distribution, kernel, nu2, gamma,
                            sample_discard, num_samples_Z, stop_adapt)
    self.accstar = accstar