def test_region_sampling_scaling(plot=False):
    """Check MLFriends region sampling on an anisotropically scaled box.

    Live points are uniform in [0.2, 0.5] x [0.02, 0.05] (second axis 10x
    narrower, exercising ScalingLayer). Every sampling method must produce
    samples spanning roughly that box, with nearly all samples inside the
    region.

    Parameters
    ----------
    plot: bool
        if true, write a diagnostic scatter plot to
        ``test_regionsampling_scaling.pdf``.
    """
    np.random.seed(1)
    upoints = np.random.uniform(0.2, 0.5, size=(1000, 2))
    upoints[:, 1] *= 0.1  # squash second axis -> tests per-axis scaling

    transformLayer = ScalingLayer(wrapped_dims=[])
    transformLayer.optimize(upoints, upoints)
    region = MLFriends(upoints, transformLayer)
    region.maxradiussq, region.enlarge = region.compute_enlargement(
        nbootstraps=30)
    print("enlargement factor:", region.enlarge, 1 / region.enlarge)
    region.create_ellipsoid()
    nclusters = transformLayer.nclusters
    assert nclusters == 1
    assert np.allclose(region.unormed, region.transformLayer.transform(
        upoints)), "transform should be reproducible"
    assert region.inside(
        upoints).all(), "live points should lie near live points"

    if plot:
        plt.plot(upoints[:, 0], upoints[:, 1], 'x ')
        for method in region.sampling_methods:
            points, nc = method(nsamples=400)
            plt.plot(points[:, 0], points[:, 1], 'o ',
                     label=str(method.__name__))
        plt.legend(loc='best')
        plt.savefig('test_regionsampling_scaling.pdf', bbox_inches='tight')
        plt.close()

    for method in region.sampling_methods:
        print("sampling_method:", method)
        # BUG FIX: sampling methods return a (points, ncalls) pair (see the
        # plot branch above); the original assigned the whole tuple to
        # `newpoints`, so `.min(axis=0)` below could not work.
        newpoints, nc = method(nsamples=4000)
        lo1, lo2 = newpoints.min(axis=0)
        hi1, hi2 = newpoints.max(axis=0)
        # samples should span the live-point box, with a little slack
        assert 0.15 < lo1 < 0.25, (method.__name__, newpoints, lo1, hi1, lo2, hi2)
        assert 0.015 < lo2 < 0.025, (method.__name__, newpoints, lo1, hi1, lo2, hi2)
        assert 0.45 < hi1 < 0.55, (method.__name__, newpoints, lo1, hi1, lo2, hi2)
        assert 0.045 < hi2 < 0.055, (method.__name__, newpoints, lo1, hi1, lo2, hi2)
        assert region.inside(newpoints).mean() > 0.99, region.inside(
            newpoints).mean()

    # shrink radius to (near) zero: membership must still hold exactly at
    # the live points themselves
    region.maxradiussq = 1e-90
    assert np.allclose(region.unormed, region.transformLayer.transform(
        upoints)), "transform should be reproducible"
    assert region.inside(
        upoints).all(), "live points should lie very near themselves"
def test_ellipsoids():
    """Exercise ellipsoid construction for several region classes.

    Builds regions from uniform points (including a set truncated along the
    first axis) and verifies that every construction point is reported as
    inside. Also checks WrappingEllipsoid on points with a discretized
    first dimension.
    """
    def check_inside(reg, pts):
        # every point the region was built from must be classified inside
        inside = reg.inside(pts)
        assert inside.shape == (len(pts), ), (inside.shape, pts.shape)
        assert inside.all()

    # 1-d wrapping ellipsoid: just verify it can be constructed
    tpoints = np.random.uniform(0.4, 0.6, size=(1000, 1))
    tregion = WrappingEllipsoid(tpoints)
    print(tregion.variable_dims)
    tregion.enlarge = tregion.compute_enlargement(nbootstraps=30)
    tregion.create_ellipsoid()

    for umax in 0.6, 0.5:
        print()
        print(umax)
        points = np.random.uniform(0.4, 0.6, size=(1000, 3))
        points = points[points[:, 0] < umax]
        # scaled copy with a discretized first column for WrappingEllipsoid
        tpoints = points * 10
        tpoints[:, 0] = np.floor(tpoints[:, 0])
        print(points, tpoints)

        transformLayer = AffineLayer(wrapped_dims=[])
        transformLayer.optimize(points, points)

        # same construction sequence for each region flavour
        for region_class in (MLFriends, RobustEllipsoidRegion, SimpleRegion):
            region = region_class(points, transformLayer)
            region.maxradiussq, region.enlarge = region.compute_enlargement(
                nbootstraps=30)
            region.create_ellipsoid()
            check_inside(region, points)

        tregion = WrappingEllipsoid(tpoints)
        print(tregion.variable_dims)
        tregion.enlarge = tregion.compute_enlargement(nbootstraps=30)
        tregion.create_ellipsoid()
        check_inside(tregion, tpoints)
def test_aharm_sampler():
    """Run the AHARM step sampler on a sharp Gaussian until it yields a point.

    Fails if the sampler does not return a sample within a generous number
    of iterations (guards against infinite loops).
    """
    def loglike(theta):
        # narrow Gaussian centred at 0.5 in every dimension
        return -0.5 * (((theta - 0.5) / 0.01)**2).sum(axis=1)

    def transform(x):
        return x

    seed = 1
    Nlive = 10
    np.random.seed(seed)
    us = np.random.uniform(size=(Nlive, 2))
    Ls = loglike(us)
    Lmin = Ls.min()

    region = MLFriends(us, ScalingLayer())
    region.maxradiussq, region.enlarge = region.compute_enlargement()
    region.create_ellipsoid()
    assert region.inside(us).all()

    nsteps = 10
    sampler = AHARMSampler(nsteps=nsteps, region_filter=True)
    niter = 0
    neval = 0
    u = None
    # iterate the sampler until it produces an accepted point
    while u is None:
        u, p, L, nc = sampler.__next__(region, Lmin, us, Ls, transform, loglike)
        niter += 1
        neval += nc
        if u is None and niter > 100 + nsteps:
            assert False, ('infinite loop?', seed, nsteps, Nlive)
    print("done in %d function calls, %d likelihood evals" % (niter, neval))
def test_reversible_gradient(plot=False):
    """Check that reflections off the region surface are reversible.

    For many random seeds: build an MLFriends region from points inside a
    Gaussian likelihood contour, reflect a direction vector off the surface
    normal at a nearby point, and verify that reflecting again recovers the
    original direction (time-reversibility of the step proposal).

    Parameters
    ----------
    plot: bool
        if true, save a diagnostic plot per seed.
    """
    def loglike(x):
        # 2-d Gaussian: broad in x, narrow (sigma=0.2) around y=0.5
        x, y = x.transpose()
        return -0.5 * (x**2 + ((y - 0.5) / 0.2)**2)

    def transform(u):
        return u

    Lmin = -0.5
    # seed 84 first: a known interesting case (see fixed vectors below)
    for i in [84] + list(range(1, 100)):
        print("setting seed = %d" % i)
        np.random.seed(i)
        points = np.random.uniform(size=(10000, 2))
        L = loglike(points)
        mask = L > Lmin
        # keep at most 100 points above the likelihood threshold
        points = points[mask, :][:100, :]
        active_u = points
        active_values = L[mask][:100]

        transformLayer = AffineLayer(wrapped_dims=[])
        transformLayer.optimize(points, points)
        region = MLFriends(points, transformLayer)
        region.maxradiussq, region.enlarge = region.compute_enlargement(
            nbootstraps=30)
        region.create_ellipsoid()
        nclusters = transformLayer.nclusters
        assert nclusters == 1
        assert np.allclose(region.unormed, region.transformLayer.transform(
            points)), "transform should be reproducible"
        assert region.inside(
            points).all(), "live points should lie near live points"

        # direction v and reflection point: fixed regression vectors for two
        # seeds, otherwise drawn randomly near a live point
        if i == 84:
            v = np.array([0.03477044, -0.01977415])
            reflpoint = np.array([0.09304075, 0.29114574])
        elif i == 4:
            v = np.array([0.03949306, -0.00634806])
            reflpoint = np.array([0.9934771, 0.55358031])
        else:
            v = np.random.normal(size=2)
            v /= (v**2).sum()**0.5
            v *= 0.04  # fixed step length
            j = np.random.randint(len(active_u))
            reflpoint = np.random.normal(active_u[j, :], 0.04)
            # NOTE(review): likely intended `or` — as written, a point
            # violating only one bound (e.g. a coordinate > 1 but all > 0)
            # is NOT skipped; confirm against the unit-cube intent.
            if not (reflpoint < 1).all() and not (reflpoint > 0).all():
                continue

        # surface normals: tangents of the live-point spheres at reflpoint,
        # mapped back to untransformed space and normalized
        bpts = region.transformLayer.transform(reflpoint).reshape((1, -1))
        tt = get_sphere_tangents(region.unormed, bpts)
        t = region.transformLayer.untransform(tt * 1e-3 + region.unormed) - region.u
        # compute new vector
        normal = t / norm(t, axis=1).reshape((-1, 1))
        print("reflecting at ", reflpoint, "with direction", v)
        mask_forward1, angles, anglesnew = get_reflection_angles(normal, v)
        if mask_forward1.any():
            # choose the forward-facing normal whose sphere center is
            # closest to the reflection point
            j = np.argmin(
                ((region.unormed[mask_forward1, :] - bpts)**2).sum(axis=1))
            k = np.arange(len(normal))[mask_forward1][j]
            angles_used = angles[k]
            normal_used = normal[k, :]
            print("chose normal", normal_used, k)
            #chosen_point = region.u[k,:]
            # reflect and reverse: vnew = -(v reflected on normal_used)
            vnew = -(v - 2 * angles_used * normal_used)
            assert vnew.shape == v.shape
            mask_forward2, angles2, anglesnew2 = get_reflection_angles(
                normal, vnew)
            #j2 = np.argmin(((region.unormed[mask_forward2,:] - bpts)**2).sum(axis=1))
            #chosen_point2 = region.u[mask_forward2,:][0,:]
            #assert j2 == j, (j2, j)
            # the same normal must be forward-facing for the reversed vector
            assert mask_forward2[k]
            #assert_allclose(chosen_point, chosen_point2)
            #for m, a, b, m2, a2, b2 in zip(mask_forward1, angles, anglesnew, mask_forward2, angles2, anglesnew2):
            #    if m != m2:
            #        print('  ', m, a, b, m2, a2, b2)
            #print("using normal", normal)
            #print("changed v from", v, "to", vnew)
            #angles2 = -(normal * (vnew / norm(vnew))).sum(axis=1)
            #mask_forward2 = angles < 0
            if plot:
                plt.figure(figsize=(5, 5))
                plt.title('%d' % mask_forward1.sum())
                plt.plot((reflpoint + v)[0], (reflpoint + v)[1], '^',
                         color='orange')
                plt.plot((reflpoint + vnew)[:, 0], (reflpoint + vnew)[:, 1],
                         '^ ', color='lime')
                plt.plot(reflpoint[0], reflpoint[1], '^ ', color='r')
                plt.plot(region.u[:, 0], region.u[:, 1], 'x ', ms=2,
                         color='k')
                plt.plot(region.u[mask_forward1, 0],
                         region.u[mask_forward1, 1], 'o ', ms=6, mfc='None',
                         mec='b')
                plt.plot(region.u[mask_forward2, 0],
                         region.u[mask_forward2, 1], 's ', ms=8, mfc='None',
                         mec='g')
                plt.xlim(0, 1)
                plt.ylim(0, 1)
                plt.savefig('test_flatnuts_reversible_gradient_%d.png' % i,
                            bbox_inches='tight')
                plt.close()
            assert mask_forward1[k] == mask_forward2[k], (mask_forward1[k],
                                                          mask_forward2[k])

            print("reflecting at ", reflpoint, "with direction", v)
            # make that step, then try to go back
            j = np.arange(len(normal))[mask_forward1][0]
            normal = normal[j, :]
            angles = (normal * (v / norm(v))).sum()
            v2 = v - 2 * angle(normal, v) * normal
            print("reflecting with", normal, "new direction", v2)
            #newpoint = reflpoint + v2
            #angles2 = (normal * (v2 / norm(v2))).sum()
            v3 = v2 - 2 * angle(normal, v2) * normal
            print("re-reflecting gives direction", v3)
            # reflecting twice on the same normal must be the identity
            assert_allclose(v3, v)

        print()
        print("FORWARD:", v, reflpoint)
        # same reversibility check through the ContourSamplingPath gradient
        samplingpath = SamplingPath(reflpoint - v, v, active_values[0])
        contourpath = ContourSamplingPath(samplingpath, region)
        normal = contourpath.gradient(reflpoint)
        if normal is not None:
            assert normal.shape == v.shape, (normal.shape, v.shape)
            print("BACKWARD:", v, reflpoint)
            v2 = -(v - 2 * angle(normal, v) * normal)
            # gradient must be deterministic at the same point
            normal2 = contourpath.gradient(reflpoint)
            assert_allclose(normal, normal2)
            normal2 = normal
            v3 = -(v2 - 2 * angle(normal2, v2) * normal2)
            assert_allclose(v3, v)
def test_detailed_balance():
    """Check detailed balance of the clocked path samplers.

    For many random seeds: run ClockedStepSampler, ClockedBisectSampler and
    ClockedNUTSSampler forward along a path, then restart from the path's
    endpoint with the reversed direction and verify the backward run returns
    to the starting point with the original velocity (reversibility).
    """
    def loglike(x):
        # 2-d Gaussian: broad in x, narrow (sigma=0.2) around y=0.5
        x, y = x.transpose()
        return -0.5 * (x**2 + ((y - 0.5) / 0.2)**2)

    def transform(u):
        return u

    Lmin = -0.5
    for i in range(1, 100):
        print()
        print("---- seed=%d ----" % i)
        print()
        np.random.seed(i)
        points = np.random.uniform(size=(10000, 2))
        L = loglike(points)
        mask = L > Lmin
        # keep at most 400 points above the likelihood threshold
        points = points[mask, :][:400, :]
        active_u = points
        active_values = L[mask][:400]

        transformLayer = AffineLayer(wrapped_dims=[])
        transformLayer.optimize(points, points)
        region = MLFriends(points, transformLayer)
        region.maxradiussq, region.enlarge = region.compute_enlargement(
            nbootstraps=30)
        region.create_ellipsoid()
        nclusters = transformLayer.nclusters
        assert nclusters == 1
        assert np.allclose(region.unormed, region.transformLayer.transform(
            points)), "transform should be reproducible"
        assert region.inside(
            points).all(), "live points should lie near live points"

        # random unit direction, scaled to a fixed step length
        v = np.random.normal(size=2)
        v /= (v**2).sum()**0.5
        v *= 0.04

        print("StepSampler ----")
        print("FORWARD SAMPLING FROM", 0, active_u[0], v, active_values[0])
        samplingpath = SamplingPath(active_u[0], v, active_values[0])
        problem = dict(loglike=loglike, transform=transform, Lmin=Lmin)
        sampler = ClockedStepSampler(ContourSamplingPath(samplingpath, region))
        check_starting_point(sampler, active_u[0], active_values[0], **problem)
        # expand the path a few single steps forward, one backward, then
        # out to +/-4; the starting point must remain valid throughout
        sampler.expand_onestep(fwd=True, **problem)
        sampler.expand_onestep(fwd=True, **problem)
        sampler.expand_onestep(fwd=True, **problem)
        sampler.expand_onestep(fwd=True, **problem)
        sampler.expand_onestep(fwd=False, **problem)
        sampler.expand_to_step(4, **problem)
        sampler.expand_to_step(-4, **problem)
        check_starting_point(sampler, active_u[0], active_values[0], **problem)

        # reverse from the forward endpoint: must come back to the start
        starti, startx, startv, startL = max(sampler.points)
        print()
        print("BACKWARD SAMPLING FROM", starti, startx, startv, startL)
        samplingpath2 = SamplingPath(startx, -startv, startL)
        sampler2 = ClockedStepSampler(
            ContourSamplingPath(samplingpath2, region))
        check_starting_point(sampler2, startx, startL, **problem)
        sampler2.expand_to_step(starti, **problem)
        check_starting_point(sampler2, startx, startL, **problem)
        starti2, startx2, startv2, startL2 = max(sampler2.points)
        assert_allclose(active_u[0], startx2)
        assert_allclose(v, -startv2)

        # same from the backward endpoint (keeping its direction)
        starti, startx, startv, startL = min(sampler.points)
        print()
        print("BACKWARD SAMPLING FROM", starti, startx, startv, startL)
        samplingpath3 = SamplingPath(startx, startv, startL)
        sampler3 = ClockedStepSampler(
            ContourSamplingPath(samplingpath3, region))
        check_starting_point(sampler3, startx, startL, **problem)
        sampler3.expand_to_step(-starti, **problem)
        check_starting_point(sampler3, startx, startL, **problem)
        starti3, startx3, startv3, startL3 = max(sampler3.points)
        assert_allclose(active_u[0], startx3)
        assert_allclose(v, startv3)

        print()
        print("BisectSampler ----")
        log = dict(log=True)
        print("FORWARD SAMPLING FROM", 0, active_u[0], v, active_values[0])
        samplingpath = SamplingPath(active_u[0], v, active_values[0])
        sampler = ClockedBisectSampler(
            ContourSamplingPath(samplingpath, region), **log)
        check_starting_point(sampler, active_u[0], active_values[0], **problem)
        sampler.expand_to_step(10, **problem)
        check_starting_point(sampler, active_u[0], active_values[0], **problem)

        starti, startx, startv, startL = max(sampler.points)
        print()
        print("BACKWARD SAMPLING FROM", starti, startx, startv, startL)
        samplingpath2 = SamplingPath(startx, -startv, startL)
        sampler2 = ClockedBisectSampler(
            ContourSamplingPath(samplingpath2, region), **log)
        check_starting_point(sampler2, startx, startL, **problem)
        sampler2.expand_to_step(starti, **problem)
        check_starting_point(sampler2, startx, startL, **problem)
        starti2, startx2, startv2, startL2 = max(sampler2.points)
        # bisection may skip over gaps; only assert reversibility when both
        # paths are gap-free
        if gap_free_path(sampler, 0, starti, **problem) and gap_free_path(
                sampler2, 0, starti2, **problem):
            assert_allclose(active_u[0], startx2)
            assert_allclose(v, -startv2)

        starti, startx, startv, startL = min(sampler.points)
        print()
        print("BACKWARD SAMPLING FROM", starti, startx, startv, startL)
        samplingpath3 = SamplingPath(startx, -startv, startL)
        sampler3 = ClockedBisectSampler(
            ContourSamplingPath(samplingpath3, region), **log)
        check_starting_point(sampler3, startx, startL, **problem)
        sampler3.expand_to_step(starti, **problem)
        check_starting_point(sampler3, startx, startL, **problem)
        starti3, startx3, startv3, startL3 = min(sampler3.points)
        if gap_free_path(sampler, 0, starti, **problem) and gap_free_path(
                sampler3, 0, starti3, **problem):
            assert_allclose(active_u[0], startx3)
            assert_allclose(v, -startv3)

        print()
        print("NUTSSampler ----")
        print("FORWARD SAMPLING FROM", 0, active_u[0], v, active_values[0])
        samplingpath = SamplingPath(active_u[0], v, active_values[0])
        # reseed so the NUTS run is reproducible per iteration
        np.random.seed(i)
        sampler = ClockedNUTSSampler(ContourSamplingPath(samplingpath, region))
        sampler.get_independent_sample(**problem)
def benchmark_transform():
    """Benchmark transform round-trips, region membership and allocations.

    For both layer types ('scale' and 'affine') and increasing
    dimensionality, times three operations (transform round-trip,
    ``region.inside``, and plain numpy array allocation as a baseline),
    prints per-iteration durations, and writes a log-log comparison plot
    to ``testtransform.pdf``.
    """
    def _time_loop(work, setup=None, budget=0.1):
        # Run `work` repeatedly until ~`budget` seconds of *timed* work has
        # accumulated; return the mean duration per iteration in ms.
        # `setup` (if given) runs untimed before each iteration and its
        # result is passed to `work`.
        niter = 0
        total_duration = 0.0
        while total_duration < budget:
            arg = setup() if setup is not None else None
            start = time.time()
            work(arg)
            total_duration += time.time() - start
            niter += 1
        return total_duration * 1000 / niter

    npts = 400
    for layer in 'scale', 'affine':
        print(" ndim | duration [%s]" % layer)
        tplotpoints = []
        rplotpoints = []
        nplotpoints = []
        for ndim in 2, 4, 8, 16, 32, 64, 128, 256:
            np.random.seed(ndim)
            points = np.random.uniform(0.4, 0.6, size=(npts, ndim))
            transformLayer = ScalingLayer() if layer == 'scale' else AffineLayer()
            region = MLFriends(points, transformLayer)
            region.maxradiussq, region.enlarge = region.compute_enlargement(
                nbootstraps=30)
            region.create_ellipsoid()

            def run_transform(_):
                # untransform/transform round-trip (RNG draw inside the
                # timed span, matching the original benchmark)
                u = region.transformLayer.untransform(
                    np.random.normal(size=(ndim)))
                region.transformLayer.transform(u)

            def make_samples():
                return np.random.normal(0.5, 0.1, size=(10, ndim))

            def run_inside(u):
                region.inside(u)

            def run_alloc(u):
                # allocation baseline: three small int arrays, filled with -1
                # (the unused `u` keeps setup overhead identical to run_inside)
                for _ in range(3):
                    arr = np.empty((10), dtype=int)
                    arr[:] = -1

            for plotpoints, work, setup in (
                    (tplotpoints, run_transform, None),
                    (rplotpoints, run_inside, make_samples),
                    (nplotpoints, run_alloc, make_samples)):
                ms_per_iter = _time_loop(work, setup)
                print('%5d | %.2fms ' % (ndim, ms_per_iter))
                plotpoints.append((ndim, ms_per_iter))

        plt.plot(*zip(*tplotpoints), label=layer + ' transform')
        plt.plot(*zip(*rplotpoints), label=layer + ' region.inside')
        plt.plot(*zip(*nplotpoints), label=layer + ' array')
    plt.xlabel('Number of dimensions')
    plt.ylabel('Duration [ms]')
    plt.yscale('log')
    plt.xscale('log')
    plt.legend(loc='best', prop=dict(size=10))
    plt.savefig('testtransform.pdf', bbox_inches='tight')
    plt.close()