def prepare_problem(problemname, ndim, nlive, sampler):
    """Burn in *sampler* on *problemname*.

    Starting from uniformly drawn live points, repeatedly replaces the lowest
    likelihood point until the volume estimate at the current threshold has been
    finite for more than ``2 * nlive + 1000`` iterations, then returns the region
    and live-point state.
    """
    loglike, grad, volume, warmup = get_problem(problemname, ndim=ndim)
    if hasattr(sampler, 'set_gradient'):
        sampler.set_gradient(grad)
    np.random.seed(1)
    us = np.random.uniform(size=(nlive, ndim))

    if ndim > 1:
        transformLayer = AffineLayer()
    else:
        transformLayer = ScalingLayer()
    transformLayer.optimize(us, us)
    region = MLFriends(us, transformLayer)
    region.maxradiussq, region.enlarge = region.compute_enlargement(nbootstraps=30)
    region.create_ellipsoid(minvol=1.0)

    Ls = np.array([loglike(u) for u in us])
    ncalls = 0
    nok = 0
    i = 0
    while True:
        # periodically rebuild the region; keep the new one only if it is tighter
        if i % int(nlive * 0.2) == 0:
            minvol = (1 - 1. / nlive)**i
            nextTransformLayer = transformLayer.create_new(us, region.maxradiussq, minvol=minvol)
            nextregion = MLFriends(us, nextTransformLayer)
            nextregion.maxradiussq, nextregion.enlarge = nextregion.compute_enlargement(nbootstraps=30)
            if nextregion.estimate_volume() <= region.estimate_volume():
                region = nextregion
                transformLayer = region.transformLayer
            region.create_ellipsoid(minvol=minvol)

        # replace lowest likelihood point
        j = np.argmin(Ls)
        Lmin = float(Ls[j])
        while True:
            # `transform` (the prior transform) is assumed to be defined at module level
            u, v, logl, nc = sampler.__next__(region, Lmin, us, Ls, transform, loglike)
            ncalls += nc
            if logl is not None:
                break

        us[j, :] = u
        region.u[j, :] = u
        region.unormed[j, :] = region.transformLayer.transform(u)
        Ls[j] = logl
        i = i + 1
        # print(i, Lmin, volume(Lmin, ndim))
        if np.isfinite(volume(Lmin, ndim)):
            nok += 1
            if nok > 2 * nlive + 1000:
                break

    return region, i, Lmin, us, Ls, transform, loglike
def evaluate_warmed_sampler(problemname, ndim, nlive, nsteps, sampler):
    """Run *sampler* on *problemname* for *nsteps* iterations after a warm-up
    phase of ``3 * nlive`` iterations, starting from the problem's warm points.

    Returns the sequence of replaced likelihood thresholds, the number of
    likelihood calls after warm-up, and per-iteration step statistics from
    ``quantify_step``.
    """
    loglike, grad, volume, warmup = get_problem(problemname, ndim=ndim)
    if hasattr(sampler, 'set_gradient'):
        sampler.set_gradient(grad)
    np.random.seed(1)

    def multi_loglike(xs):
        return np.asarray([loglike(x) for x in xs])

    us = np.array([warmup(ndim) for i in range(nlive)])
    Ls = np.array([loglike(u) for u in us])
    vol0 = max((volume(Li, ndim) for Li in Ls))
    nwarmup = 3 * nlive

    if ndim > 1:
        transformLayer = AffineLayer()
    else:
        transformLayer = ScalingLayer()
    transformLayer.optimize(us, us)
    region = MLFriends(us, transformLayer)
    region.maxradiussq, region.enlarge = region.compute_enlargement(nbootstraps=30)
    region.create_ellipsoid(minvol=vol0)
    assert region.ellipsoid_center is not None
    sampler.region_changed(Ls, region)

    Lsequence = []
    stepsequence = []
    ncalls = 0
    for i in tqdm.trange(nsteps + nwarmup):
        # periodically rebuild the region; keep the new one only if it is tighter
        if i % int(nlive * 0.2) == 0:
            minvol = (1 - 1. / nlive)**i * vol0
            with warnings.catch_warnings(), np.errstate(all='raise'):
                # escalate warnings to exceptions so a problematic region update
                # is skipped (assumed intent of the except-Warning handler below)
                warnings.simplefilter("error")
                try:
                    nextTransformLayer = transformLayer.create_new(us, region.maxradiussq, minvol=minvol)
                    nextregion = MLFriends(us, nextTransformLayer)
                    nextregion.maxradiussq, nextregion.enlarge = nextregion.compute_enlargement(nbootstraps=30)
                    if nextregion.estimate_volume() <= region.estimate_volume():
                        nextregion.create_ellipsoid(minvol=minvol)
                        region = nextregion
                        transformLayer = region.transformLayer
                        assert region.ellipsoid_center is not None
                        sampler.region_changed(Ls, region)
                except Warning as w:
                    print("not updating region because: %s" % w)
                except FloatingPointError as e:
                    print("not updating region because: %s" % e)
                except np.linalg.LinAlgError as e:
                    print("not updating region because: %s" % e)

        # replace lowest likelihood point
        j = np.argmin(Ls)
        Lmin = float(Ls[j])
        while True:
            # `transform` (the prior transform) is assumed to be defined at module level
            u, v, logl, nc = sampler.__next__(region, Lmin, us, Ls, transform, multi_loglike)
            if i > nwarmup:
                ncalls += nc
            if logl is not None:
                assert np.isfinite(u).all(), u
                assert np.isfinite(v).all(), v
                assert np.isfinite(logl), logl
                break

        if i > nwarmup:
            Lsequence.append(Lmin)
            stepsequence.append(quantify_step(us[sampler.starti, :], u))

        us[j, :] = u
        Ls[j] = logl

    Lsequence = np.asarray(Lsequence)
    return Lsequence, ncalls, np.array(stepsequence)
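
# Usage sketch (assumptions, not verified here): the sampler must implement the
# step-sampler interface used above (__next__(region, Lmin, us, Ls, transform,
# loglike), region_changed, starti); `RegionSliceSampler` from
# ultranest.stepsampler is one such sampler, and 'rosenbrock' stands in for a
# problem name that get_problem recognizes.
if __name__ == '__main__':
    import ultranest.stepsampler

    ndim = 4
    sampler = ultranest.stepsampler.RegionSliceSampler(nsteps=2 * ndim)
    Lsequence, ncalls, steps = evaluate_warmed_sampler(
        'rosenbrock', ndim=ndim, nlive=400, nsteps=1000, sampler=sampler)
    print("likelihood calls (after warm-up):", ncalls)
    print("threshold sequence length:", len(Lsequence))
    print("step statistics shape:", steps.shape)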