Example #1
0
class SingleBanana(object):
    """Twisted ("banana"-shaped) log-target as in [WKB09].

        :param dim:

            integer; the dimension

        :param twistdim:

            integer; index of the axis that carries the large variance;
            axis ``twistdim + 1`` is twisted against it.

    """

    def __init__(self, dim, twistdim):
        assert dim >= 2
        assert twistdim < dim - 1
        self.dim = dim
        self.twistdim = twistdim
        # Kilbinger parameters
        self.sigma1_squared = 100.0
        self.p = 10.0
        self.b = 0.03
        self.mean = np.zeros(dim)
        covariance = np.eye(dim)
        covariance[twistdim, twistdim] = self.sigma1_squared
        self.cov = covariance
        self.underlying_gauss = Gauss(self.mean, self.cov)

    def __call__(self, x):
        # Work on a copy so the caller's array is never mutated.
        twisted = np.array(x)
        t = self.twistdim
        # Bend axis t+1 by a quadratic function of axis t, then evaluate
        # the underlying Gaussian at the untwisted point.
        twisted[t + 1] += self.b * (twisted[t] * twisted[t] - self.sigma1_squared)
        return self.underlying_gauss.evaluate(twisted)
Example #2
0
class LogTarget(object):
    """Bimodal twisted ("banana"-shaped) log-target as in [WKB09].

        :param dim:

            integer; the dimension.  Must be at least 3, because
            ``__call__`` shifts and twists components 0, 1 and 2.

    """
    def __init__(self, dim):
        # __call__ indexes x[0], x[1] and x[2]; the previous ``dim >= 2``
        # check allowed constructing a target that crashes on evaluation.
        assert dim >= 3
        self.dim = dim
        # kilbinger_params:
        self.sigma1_squared = 100.
        self.p = 10.
        self.b = .03
        self.mean = np.zeros(dim)
        # large variance along the first axis, unit variance elsewhere
        self.cov = np.diag([self.sigma1_squared] + [1. for i in range(dim-1)])
        self.underlying_gauss = Gauss(self.mean, self.cov)

    def __call__(self, x):
        """Log-density of an equal-weight mixture of two shifted,
        oppositely twisted Gaussians, evaluated at *x*."""
        # first mode: shift dims 1/2 by (+5, -5), twist dim 1 against dim 0
        x1 = np.array(x)
        x1[1] += 5.
        x1[2] -= 5.
        x1[1] += self.b * (x1[0]*x1[0] - self.sigma1_squared)
        # second mode: opposite shift, twist dim 2 against dim 0
        x2 = np.array(x)
        x2[1] -= 5.
        x2[2] += 5.
        x2[2] += self.b * (x2[0]*x2[0] - self.sigma1_squared)
        # log(.5*e^a + .5*e^b) computed stably: the naive form overflows
        # (or underflows to log(0)) when the log-densities are large in
        # magnitude; logaddexp avoids materializing the exponentials.
        return np.logaddexp(self.underlying_gauss.evaluate(x1),
                            self.underlying_gauss.evaluate(x2)) - np.log(2.)
Example #3
0
 def __init__(self, dim):
     """Store the target's parameters and build its underlying Gaussian."""
     assert dim >= 2
     self.dim = dim
     # Kilbinger parameters
     self.sigma1_squared = 100.
     self.p = 10.
     self.b = .03
     self.mean = np.zeros(dim)
     # large variance on the first axis, unit variance on the rest
     variances = [self.sigma1_squared] + [1.] * (dim - 1)
     self.cov = np.diag(variances)
     self.underlying_gauss = Gauss(self.mean, self.cov)
Example #4
0
 def __init__(self, dim, twistdim):
     """Store the twisted target's parameters and build its underlying Gaussian."""
     assert dim >= 2
     # the twisted axis must leave room for axis twistdim + 1
     assert twistdim < dim - 1
     self.dim = dim
     self.twistdim = twistdim
     # Kilbinger parameters
     self.sigma1_squared = 100.0
     self.p = 10.0
     self.b = 0.03
     self.mean = np.zeros(dim)
     covariance = np.eye(dim)
     covariance[twistdim, twistdim] = self.sigma1_squared
     self.cov = covariance
     self.underlying_gauss = Gauss(self.mean, self.cov)
Example #5
0
    def run_iter(
        self,
        num_gauss_samples=400,
        max_ncalls=100000,
        min_ess=400,
        max_improvement_loops=4,
        heavytail_laplaceapprox=True,
        verbose=True,
    ):
        """
        Iterative version of run(). See documentation there.
        Returns current samples on each iteration.

        Generator: yields the current result (from ``_update_results``)
        after every importance-sampling round and stops once either
        ``min_ess`` total effective samples are accumulated or the
        ``max_ncalls`` likelihood-call budget is exceeded.

        :param num_gauss_samples: samples drawn per round (grown after
            each unsuccessful round).
        :param max_ncalls: budget of likelihood evaluations.
        :param min_ess: target number of total effective samples.
        :param max_improvement_loops: maximum proposal-update rounds.
        :param heavytail_laplaceapprox: start from a widened Gaussian
            mixture instead of a single Gaussian.
        :param verbose: not referenced in this body; presumably kept for
            interface parity with run() — TODO confirm.
        """
        paramnames = self.paramnames
        loglike = self.loglike
        transform = self.transform

        ndim = len(paramnames)
        optu, cov, invcov = self.optu, self.cov, self.invcov
        # for numerical stability, use 1e260, so that we can go down by 1e-100,
        # but up by 1e600
        self.Loffset = self.optL  #+ 600

        # first iteration: create a single gaussian and importance-sample
        if self.log:
            self.logger.info("Initiating gaussian importance sampler")

        def log_target(u):
            """ log-posterior to sample from """
            # reject points outside the unit hypercube (prior support)
            if (u > 1).any() or (u < 0).any():
                return -np.inf
            p = transform(u)
            L = loglike(p)
            # subtract the offset so weights stay within floating range
            return L - self.Loffset

        if not heavytail_laplaceapprox:
            initial_proposal = Gauss(optu, cov)
        else:
            # make a few gaussians, in case the fit errors were too narrow
            means, covs, weights = _make_initial_proposal(optu, cov)
            initial_proposal = create_gaussian_mixture(means, covs, weights)

        # history of proposals used; passed to _collect_samples at the end
        mixes = [initial_proposal]

        N = num_gauss_samples
        # each MPI rank draws its share of the N samples
        Nhere = N // self.mpi_size
        if self.mpi_size > 1:
            SequentialIS = ImportanceSampler
            from pypmc.tools.parallel_sampler import MPISampler
            sampler = MPISampler(SequentialIS,
                                 target=log_target,
                                 proposal=initial_proposal,
                                 prealloc=Nhere)
        else:
            sampler = ImportanceSampler(target=log_target,
                                        proposal=initial_proposal,
                                        prealloc=Nhere)

        if self.log:
            self.logger.info("    sampling %d ..." % N)
        # overflow in the weight exponentials is expected; warn, don't raise
        np.seterr(over="warn")
        sampler.run(Nhere)
        self.ncall += Nhere * self.mpi_size

        samples, weights = self._collect_samples(sampler)
        assert weights.sum() > 0, 'All samples have weight zero.'

        vbmix = None
        for it in range(max_improvement_loops):
            ess_fraction = ess(weights)
            if self.log:
                self.logger.info("    sampling efficiency: %.3f%%" %
                                 (ess_fraction * 100))

            # rounds 0, 3, 6, ... rebuild the proposal from scratch;
            # other rounds refine the previous variational-Bayes fit
            # (vb and vb_prune carry over from the most recent "scratch" round)
            if it % 3 == 0:
                if self.log:
                    self.logger.info("Optimizing proposal (from scratch) ...")
                mix = _make_proposal(samples, weights, optu, cov, invcov)
                vb = GaussianInference(samples,
                                       weights=weights,
                                       initial_guess=mix,
                                       W0=np.eye(ndim) * 1e10)
                # pruning threshold handed to vb.run below — presumably a
                # minimum per-component sample count; verify against pypmc docs
                vb_prune = 0.5 * len(vb.data) / vb.K
            else:
                if self.log:
                    self.logger.info("Optimizing proposal (from previous) ...")
                prior_for_proposal_update = vb.posterior2prior()
                prior_for_proposal_update.pop('alpha0')
                vb = GaussianInference(samples,
                                       initial_guess=vbmix,
                                       weights=weights,
                                       **prior_for_proposal_update)

            if self.log:
                self.logger.info('    running variational Bayes ...')
            vb.run(1000,
                   rel_tol=1e-8,
                   abs_tol=1e-5,
                   prune=vb_prune,
                   verbose=False)
            vbmix = vb.make_mixture()
            if self.log:
                self.logger.info('    reduced from %d to %d components' %
                                 (len(mix.components), len(vbmix.components)))

            # draw the next batch from the updated mixture proposal
            sampler.proposal = vbmix

            if self.log:
                self.logger.info("Importance sampling %d ..." % N)
            sampler.run(N // self.mpi_size)
            self.ncall += (N // self.mpi_size) * self.mpi_size
            mixes.append(vbmix)

            # diagnostics on the newest batch only
            samples, weights = self._collect_samples(sampler)
            ess_fraction = ess(weights)
            if self.log:
                self.logger.debug("    sampling efficiency: %.3f%%" %
                                  (ess_fraction * 100))
                self.logger.debug("    obtained %.0f new effective samples" %
                                  (ess_fraction * len(weights)))

            # re-collect across all rounds, passing the proposal history
            samples, weights = self._collect_samples(sampler,
                                                     all=True,
                                                     mixes=mixes)
            ess_fraction = ess(weights)
            Ndone = ess_fraction * len(weights)

            result = self._update_results(samples, weights)
            if Ndone >= min_ess:
                # enough effective samples accumulated — final yield
                if self.log:
                    self.logger.info(
                        "Status: Have %d total effective samples, done." %
                        Ndone)
                yield result
                break
            elif self.ncall > max_ncalls:
                # call budget exhausted — final yield
                if self.log:
                    self.logger.info(
                        "Status: Have %d total effective samples, reached max number of calls."
                        % Ndone)
                yield result
                break
            else:
                # grow the batch size, capped by the remaining call budget
                N = int(1.4 * min(max_ncalls - self.ncall, N))
                if self.log:
                    self.logger.info(
                        "Status: Have %d total effective samples, sampling %d next."
                        % (Ndone, N))
                yield result