Example #1
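The snippets below are excerpted from the UltraNest test suite; the listing omits the module-level imports they rely on. A minimal sketch of that shared context is given here, assuming standard import locations (the exact list is not shown in the original):

# Assumed module-level context for the examples below (not part of the listing):
import os
import time
import warnings

import numpy as np
import matplotlib.pyplot as plt
import tqdm
from numpy.linalg import norm
from numpy.testing import assert_allclose

from ultranest.mlfriends import (AffineLayer, MLFriends, ScalingLayer,
                                 update_clusters)

here = os.path.dirname(__file__)  # directory with the test data files (assumption)

# Further names used below (AHARMSampler, SamplingPath, ContourSamplingPath,
# the Clocked*Sampler classes, WrappingEllipsoid, loglike/transform test
# helpers, ...) come from other ultranest modules and from the test files
# themselves; their exact import paths are omitted here.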
def benchmark_maxradius():
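    # Time MLFriends.compute_maxradiussq() for a grid of dimensionalities and
    # live point counts, repeating each call until ~1 s has elapsed, and plot
    # the per-call duration normalised by npts**2.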
    print(" ndim |  npts | duration")
    for ndim in 2, 4, 8, 16, 32, 64:
        plotpoints = []
        np.random.seed(ndim)
        for npts in 100, 400, 1000, 4000:
            points = np.random.uniform(size=(npts, ndim))
            transformLayer = ScalingLayer()
            region = MLFriends(points, transformLayer)

            niter = 0
            total_duration = 0
            while total_duration < 1:
                start = time.time()
                maxr = region.compute_maxradiussq(nbootstraps=20)
                total_duration += time.time() - start
                niter += 1
            print('%5d | %5d | %.2fms  val=%f' %
                  (ndim, npts, total_duration * 1000 / niter, maxr))
            plotpoints.append((npts, total_duration * 1000 / niter / npts**2))
        plt.plot(*zip(*plotpoints), label='ndim=%d' % ndim)

    plt.xlabel('Number of live points')
    plt.ylabel('Duration [ms] / nlive$^2$')
    plt.yscale('log')
    plt.xscale('log')
    plt.legend(loc='best', prop=dict(size=10))
    plt.savefig('testmaxradius.pdf', bbox_inches='tight')
    plt.close()
Example #2
def test_region_sampling_affine(plot=False):
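    # Build an MLFriends region with an AffineLayer on anisotropic points
    # (y compressed to [0, 0.5]) and check that every sampling method yields
    # points inside the region that span the expected bounding box.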
    np.random.seed(1)
    upoints = np.random.uniform(size=(1000, 2))
    upoints[:, 1] *= 0.5

    transformLayer = AffineLayer(wrapped_dims=[])
    transformLayer.optimize(upoints, upoints)
    region = MLFriends(upoints, transformLayer)
    region.maxradiussq, region.enlarge = region.compute_enlargement(
        nbootstraps=30)
    print("enlargement factor:", region.enlarge, 1 / region.enlarge)
    region.create_ellipsoid()
    nclusters = transformLayer.nclusters
    assert nclusters == 1
    assert np.allclose(region.unormed, region.transformLayer.transform(
        upoints)), "transform should be reproducible"
    assert region.inside(
        upoints).all(), "live points should lie near live points"
    if plot:
        plt.plot(upoints[:, 0], upoints[:, 1], 'x ')
        for method in region.sampling_methods:
            points, nc = method(nsamples=400)
            plt.plot(points[:, 0],
                     points[:, 1],
                     'o ',
                     label=str(method.__name__))
        plt.legend(loc='best')
        plt.savefig('test_regionsampling_affine.pdf', bbox_inches='tight')
        plt.close()

    for method in region.sampling_methods:
        print("sampling_method:", method)
        newpoints, nc = method(nsamples=4000)
        lo1, lo2 = newpoints.min(axis=0)
        hi1, hi2 = newpoints.max(axis=0)
        assert 0 <= lo1 < 0.1, (method.__name__, newpoints, lo1, hi1, lo2, hi2)
        assert 0 <= lo2 < 0.1, (method.__name__, newpoints, lo1, hi1, lo2, hi2)
        assert 0.95 < hi1 <= 1, (method.__name__, newpoints, lo1, hi1, lo2,
                                 hi2)
        assert 0.45 <= hi2 < 0.55, (method.__name__, newpoints, lo1, hi1, lo2,
                                    hi2)
        assert region.inside(newpoints).all()

    region.maxradiussq = 1e-90
    assert np.allclose(region.unormed, region.transformLayer.transform(
        upoints)), "transform should be reproducible"
    assert region.inside(
        upoints).all(), "live points should lie very near themselves"
Example #3
def test_aharm_sampler():
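    # Draw a replacement live point with AHARMSampler for a narrow 2-d
    # Gaussian likelihood, requiring termination within a bounded number of
    # calls.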
    def loglike(theta):
        return -0.5 * (((theta - 0.5) / 0.01)**2).sum(axis=1)

    def transform(x):
        return x

    seed = 1
    Nlive = 10
    np.random.seed(seed)
    us = np.random.uniform(size=(Nlive, 2))
    Ls = loglike(us)
    Lmin = Ls.min()
    transformLayer = ScalingLayer()
    region = MLFriends(us, transformLayer)
    region.maxradiussq, region.enlarge = region.compute_enlargement()
    region.create_ellipsoid()
    assert region.inside(us).all()
    nsteps = 10
    sampler = AHARMSampler(nsteps=nsteps, region_filter=True)

    nfunccalls = 0
    ncalls = 0
    while True:
        u, p, L, nc = sampler.__next__(region, Lmin, us, Ls, transform,
                                       loglike)
        nfunccalls += 1
        ncalls += nc
        if u is not None:
            break
        if nfunccalls > 100 + nsteps:
            assert False, ('infinite loop?', seed, nsteps, Nlive)
    print("done in %d function calls, %d likelihood evals" %
          (nfunccalls, ncalls))
Example #4
def test_region_mean_distances():
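    # Compare MLFriends.compute_mean_pair_distance() against a brute-force
    # average of pairwise distances in the transformed space, using a
    # half-ring of points.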
    np.random.seed(1)
    points = np.random.uniform(0.4, 0.6, size=(10000, 2))
    #points[:,1] *= 0.5
    mask = np.abs((points[:, 0] - 0.5)**2 +
                  (points[:, 1] - 0.5)**2 - 0.08**2) < 0.02**2
    print('circle:', mask.sum())
    points = points[mask]
    mask = points[:, 0] < 0.5
    print('half-circle:', mask.sum())
    points = points[mask]

    transformLayer = AffineLayer(wrapped_dims=[])
    transformLayer.optimize(points, points)
    region = MLFriends(points, transformLayer)
    region.maxradiussq, region.enlarge = region.compute_enlargement(
        nbootstraps=30)
    print("enlargement factor:", region.enlarge, 1 / region.enlarge)
    region.create_ellipsoid()
    meandist = region.compute_mean_pair_distance()

    t = transformLayer.transform(region.u)
    d = 0
    N = 0
    for i in range(len(t)):
        for j in range(i):
            d += ((t[i, :] - t[j, :])**2).sum()**0.5
            #print(i, j, t[i,:], t[j,:], ((t[i,:] - t[j,:])**2).sum())
            N += 1

    print((meandist, d, N, t))
    assert np.isclose(meandist, d / N), (meandist, d, N)
Example #5
def test_clusteringcase_eggbox():
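    # Cluster a stored egg-box-shaped live point set and check that the
    # expected number of clusters (15-19) is recovered.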
    from ultranest.mlfriends import update_clusters, ScalingLayer, MLFriends
    points = np.loadtxt(os.path.join(here, "eggboxregion.txt"))
    transformLayer = ScalingLayer()
    transformLayer.optimize(points, points)
    region = MLFriends(points, transformLayer)
    maxr = region.compute_maxradiussq(nbootstraps=30)
    assert 1e-10 < maxr < 5e-10
    print('maxradius:', maxr)
    nclusters, clusteridxs, overlapped_points = update_clusters(
        points, points, maxr)
    # plt.title('nclusters: %d' % nclusters)
    # for i in np.unique(clusteridxs):
    #    x, y = points[clusteridxs == i].transpose()
    #    plt.scatter(x, y)
    # plt.savefig('testclustering_eggbox.pdf', bbox_inches='tight')
    # plt.close()
    assert 14 < nclusters < 20, nclusters
Example #6
def run_aharm_sampler():
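    # Run AHARMSampler for several random seeds, live point counts and step
    # counts on a 2-d toy problem, making sure each run terminates with a
    # valid replacement point.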
    for seed in [733] + list(range(10)):
        print()
        print("SEED=%d" % seed)
        print()
        np.random.seed(seed)
        nsteps = max(1, int(10**np.random.uniform(0, 3)))
        Nlive = int(10**np.random.uniform(1.5, 3))
        print("Nlive=%d nsteps=%d" % (Nlive, nsteps))
        sampler = AHARMSampler(nsteps, adaptive_nsteps=False, region_filter=False)
        us = np.random.uniform(0.6, 0.8, size=(4000, 2))
        Ls = loglike_vectorized(us)
        i = np.argsort(Ls)[-Nlive:]
        us = us[i,:]
        Ls = Ls[i]
        Lmin = Ls.min()
        
        transformLayer = ScalingLayer()
        transformLayer.optimize(us, us)
        region = MLFriends(us, transformLayer)
        region.maxradiussq, region.enlarge = region.compute_enlargement()
        region.create_ellipsoid()
        nfunccalls = 0
        ncalls = 0
        while True:
            u, p, L, nc = sampler.__next__(region, Lmin, us, Ls, transform, loglike)
            nfunccalls += 1
            ncalls += nc
            if u is not None:
                break
            if nfunccalls > 100 + nsteps:
                assert False, ('infinite loop?', seed, nsteps, Nlive)
        print("done in %d function calls, %d likelihood evals" % (nfunccalls, ncalls))
Example #7
def test_overclustering_eggbox_txt():
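    # Regression test against over-clustering: repeatedly reclustering stored
    # problematic point sets should consistently yield 15-19 clusters.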
    from ultranest.mlfriends import update_clusters, ScalingLayer, MLFriends
    np.random.seed(1)
    for i in [20, 23, 24, 27, 49]:
        print()
        print("==== TEST CASE %d =====================" % i)
        print()
        points = np.loadtxt(os.path.join(here, "overclustered_u_%d.txt" % i))

        for k in range(3):
            transformLayer = ScalingLayer(wrapped_dims=[])
            transformLayer.optimize(points, points)
            region = MLFriends(points, transformLayer)
            maxr = region.compute_maxradiussq(nbootstraps=30)
            region.maxradiussq = maxr
            nclusters = transformLayer.nclusters

            print("manual: r=%e nc=%d" % (region.maxradiussq, nclusters))
            # assert 1e-10 < maxr < 5e-10
            nclusters, clusteridxs, overlapped_points = update_clusters(
                points, points, maxr)
            print("reclustered: nc=%d" % (nclusters))

        if False:
            plt.title('nclusters: %d' % nclusters)
            for k in np.unique(clusteridxs):
                x, y = points[clusteridxs == k].transpose()
                plt.scatter(x, y)
            plt.savefig('testoverclustering_eggbox_%d.pdf' % i,
                        bbox_inches='tight')
            plt.close()
        assert 14 < nclusters < 20, (nclusters, i)

        for j in range(3):
            nclusters, clusteridxs, overlapped_points = update_clusters(
                points, points, maxr)
            assert 14 < nclusters < 20, (nclusters, i)
Example #8
def test_ellipsoid_bracket(plot=False):
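    # Check ellipsoid_bracket(): the returned interval must contain zero, and
    # its end points (moved along direction v) must touch the enlarged
    # ellipsoid surface.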
    for seed in range(20):
        print("seed:", seed)
        np.random.seed(seed)
        if seed % 2 == 0:
            us = np.random.normal(size=(2**np.random.randint(3, 10), 2))
            us /= ((us**2).sum(axis=1)**0.5).reshape((-1, 1))
            us = us * 0.1 + 0.5
        else:
            us = np.random.uniform(size=(2**np.random.randint(3, 10), 2))

        if plot:
            import matplotlib.pyplot as plt
            plt.plot(us[:, 0], us[:, 1], 'o ', ms=2)

        transformLayer = ScalingLayer()
        region = MLFriends(us, transformLayer)
        try:
            region.maxradiussq, region.enlarge = region.compute_enlargement()
            region.create_ellipsoid()
        except ValueError:
            continue

        print(region.ellipsoid_center)
        print(region.enlarge)
        print(region.ellipsoid_cov)
        print(region.ellipsoid_invcov)
        print(region.ellipsoid_axes)
        print(region.ellipsoid_inv_axes)

        ucurrent = np.array([2**0.5 * 0.1 / 2 + 0.5, 2**0.5 * 0.1 / 2 + 0.5])
        ucurrent = np.array([0.4, 0.525])
        v = np.array([1., 0])
        if plot: plt.plot(ucurrent[0], ucurrent[1], 'o')
        print("from", ucurrent, "in direction", v)
        left, right = ellipsoid_bracket(ucurrent, v, region.ellipsoid_center,
                                        region.ellipsoid_inv_axes,
                                        region.enlarge)
        uleft = ucurrent + v * left
        uright = ucurrent + v * right

        if plot:
            plt.plot([uleft[0], uright[0]], [uleft[1], uright[1]], 'x-')

            plt.savefig('test_ellipsoid_bracket.pdf', bbox_inches='tight')
            plt.close()
        print("ellipsoid bracket:", left, right)
        assert left <= 0, left
        assert right >= 0, right

        assert_point_touches_ellipsoid(ucurrent, v, left,
                                       region.ellipsoid_center,
                                       region.ellipsoid_invcov, region.enlarge)
        assert_point_touches_ellipsoid(ucurrent, v, right,
                                       region.ellipsoid_center,
                                       region.ellipsoid_invcov, region.enlarge)
Example #9
def make_region(ndim):
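    # Helper: build an MLFriends region (with fitted ellipsoid) from 1000
    # uniform points, using an AffineLayer above one dimension and a
    # ScalingLayer otherwise.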
    us = np.random.uniform(size=(1000, ndim))
    if ndim > 1:
        transformLayer = AffineLayer()
    else:
        transformLayer = ScalingLayer()
    transformLayer.optimize(us, us)
    region = MLFriends(us, transformLayer)
    region.maxradiussq, region.enlarge = region.compute_enlargement(
        nbootstraps=30)
    region.create_ellipsoid(minvol=1.0)
    return region
Example #10
def test_region_ellipsoid(plot=False):
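    # Check that inside_ellipsoid() agrees with an explicit Mahalanobis
    # distance test against the enlarged bounding ellipsoid.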
    np.random.seed(1)
    points = np.random.uniform(0.4, 0.6, size=(1000, 2))
    points[:, 1] *= 0.5

    transformLayer = AffineLayer(wrapped_dims=[])
    transformLayer.optimize(points, points)
    region = MLFriends(points, transformLayer)
    region.maxradiussq, region.enlarge = region.compute_enlargement(
        nbootstraps=30)
    print("enlargement factor:", region.enlarge, 1 / region.enlarge)
    region.create_ellipsoid()
    nclusters = transformLayer.nclusters
    assert nclusters == 1

    bpts = np.random.uniform(size=(100, 2))
    mask = region.inside_ellipsoid(bpts)

    d = (bpts - region.ellipsoid_center)
    mask2 = np.einsum('ij,jk,ik->i', d, region.ellipsoid_invcov,
                      d) <= region.enlarge

    assert_allclose(mask, mask2)
Example #11
def test_reversible_gradient(plot=False):
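    # Check that reflections off the MLFriends surface are reversible:
    # reflecting a direction twice off the same surface normal must recover
    # the original direction, both manually and via
    # ContourSamplingPath.gradient().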
    def loglike(x):
        x, y = x.transpose()
        return -0.5 * (x**2 + ((y - 0.5) / 0.2)**2)

    def transform(u):
        return u

    Lmin = -0.5

    for i in [84] + list(range(1, 100)):
        print("setting seed = %d" % i)
        np.random.seed(i)
        points = np.random.uniform(size=(10000, 2))
        L = loglike(points)
        mask = L > Lmin
        points = points[mask, :][:100, :]
        active_u = points
        active_values = L[mask][:100]

        transformLayer = AffineLayer(wrapped_dims=[])
        transformLayer.optimize(points, points)
        region = MLFriends(points, transformLayer)
        region.maxradiussq, region.enlarge = region.compute_enlargement(
            nbootstraps=30)
        region.create_ellipsoid()
        nclusters = transformLayer.nclusters
        assert nclusters == 1
        assert np.allclose(region.unormed,
                           region.transformLayer.transform(
                               points)), "transform should be reproducible"
        assert region.inside(
            points).all(), "live points should lie near live points"

        if i == 84:
            v = np.array([0.03477044, -0.01977415])
            reflpoint = np.array([0.09304075, 0.29114574])
        elif i == 4:
            v = np.array([0.03949306, -0.00634806])
            reflpoint = np.array([0.9934771, 0.55358031])

        else:
            v = np.random.normal(size=2)
            v /= (v**2).sum()**0.5
            v *= 0.04
            j = np.random.randint(len(active_u))
            reflpoint = np.random.normal(active_u[j, :], 0.04)
            if not (reflpoint < 1).all() and not (reflpoint > 0).all():
                continue

        bpts = region.transformLayer.transform(reflpoint).reshape((1, -1))
        tt = get_sphere_tangents(region.unormed, bpts)
        t = region.transformLayer.untransform(tt * 1e-3 +
                                              region.unormed) - region.u
        # compute new vector
        normal = t / norm(t, axis=1).reshape((-1, 1))
        print("reflecting at  ", reflpoint, "with direction", v)
        mask_forward1, angles, anglesnew = get_reflection_angles(normal, v)
        if mask_forward1.any():
            j = np.argmin(
                ((region.unormed[mask_forward1, :] - bpts)**2).sum(axis=1))
            k = np.arange(len(normal))[mask_forward1][j]
            angles_used = angles[k]
            normal_used = normal[k, :]
            print("chose normal", normal_used, k)
            #chosen_point = region.u[k,:]
            vnew = -(v - 2 * angles_used * normal_used)
            assert vnew.shape == v.shape

            mask_forward2, angles2, anglesnew2 = get_reflection_angles(
                normal, vnew)
            #j2 = np.argmin(((region.unormed[mask_forward2,:] - bpts)**2).sum(axis=1))
            #chosen_point2 = region.u[mask_forward2,:][0,:]
            #assert j2 == j, (j2, j)
            assert mask_forward2[k]
            #assert_allclose(chosen_point, chosen_point2)

            #for m, a, b, m2, a2, b2 in zip(mask_forward1, angles, anglesnew, mask_forward2, angles2, anglesnew2):
            #    if m != m2:
            #        print('  ', m, a, b, m2, a2, b2)

            #print("using normal", normal)
            #print("changed v from", v, "to", vnew)

            #angles2 = -(normal * (vnew / norm(vnew))).sum(axis=1)
            #mask_forward2 = angles < 0
            if plot:
                plt.figure(figsize=(5, 5))
                plt.title('%d' % mask_forward1.sum())
                plt.plot((reflpoint + v)[0], (reflpoint + v)[1],
                         '^',
                         color='orange')
                plt.plot((reflpoint + vnew)[0], (reflpoint + vnew)[1],
                         '^ ',
                         color='lime')
                plt.plot(reflpoint[0], reflpoint[1], '^ ', color='r')
                plt.plot(region.u[:, 0], region.u[:, 1], 'x ', ms=2, color='k')
                plt.plot(region.u[mask_forward1, 0],
                         region.u[mask_forward1, 1],
                         'o ',
                         ms=6,
                         mfc='None',
                         mec='b')
                plt.plot(region.u[mask_forward2, 0],
                         region.u[mask_forward2, 1],
                         's ',
                         ms=8,
                         mfc='None',
                         mec='g')
                plt.xlim(0, 1)
                plt.ylim(0, 1)
                plt.savefig('test_flatnuts_reversible_gradient_%d.png' % i,
                            bbox_inches='tight')
                plt.close()
            assert mask_forward1[k] == mask_forward2[k], (mask_forward1[k],
                                                          mask_forward2[k])

            print("reflecting at  ", reflpoint, "with direction", v)
            # make that step, then try to go back
            j = np.arange(len(normal))[mask_forward1][0]
            normal = normal[j, :]
            angles = (normal * (v / norm(v))).sum()
            v2 = v - 2 * angle(normal, v) * normal

            print("reflecting with", normal, "new direction", v2)

            #newpoint = reflpoint + v2
            #angles2 = (normal * (v2 / norm(v2))).sum()
            v3 = v2 - 2 * angle(normal, v2) * normal

            print("re-reflecting gives direction", v3)
            assert_allclose(v3, v)

            print()
            print("FORWARD:", v, reflpoint)
            samplingpath = SamplingPath(reflpoint - v, v, active_values[0])
            contourpath = ContourSamplingPath(samplingpath, region)
            normal = contourpath.gradient(reflpoint)
            if normal is not None:
                assert normal.shape == v.shape, (normal.shape, v.shape)

                print("BACKWARD:", v, reflpoint)
                v2 = -(v - 2 * angle(normal, v) * normal)
                normal2 = contourpath.gradient(reflpoint)
                assert_allclose(normal, normal2)
                normal2 = normal
                v3 = -(v2 - 2 * angle(normal2, v2) * normal2)
                assert_allclose(v3, v)
Example #12
def test_detailed_balance():
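    # Detailed-balance test: for ClockedStepSampler and ClockedBisectSampler,
    # running a path forward and then backward from its end point must return
    # to the start with reversed velocity; ClockedNUTSSampler is run for one
    # independent sample.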
    def loglike(x):
        x, y = x.transpose()
        return -0.5 * (x**2 + ((y - 0.5) / 0.2)**2)

    def transform(u):
        return u

    Lmin = -0.5
    for i in range(1, 100):
        print()
        print("---- seed=%d ----" % i)
        print()
        np.random.seed(i)
        points = np.random.uniform(size=(10000, 2))
        L = loglike(points)
        mask = L > Lmin
        points = points[mask, :][:400, :]
        active_u = points
        active_values = L[mask][:400]

        transformLayer = AffineLayer(wrapped_dims=[])
        transformLayer.optimize(points, points)
        region = MLFriends(points, transformLayer)
        region.maxradiussq, region.enlarge = region.compute_enlargement(
            nbootstraps=30)
        region.create_ellipsoid()
        nclusters = transformLayer.nclusters
        assert nclusters == 1
        assert np.allclose(region.unormed,
                           region.transformLayer.transform(
                               points)), "transform should be reproducible"
        assert region.inside(
            points).all(), "live points should lie near live points"

        v = np.random.normal(size=2)
        v /= (v**2).sum()**0.5
        v *= 0.04

        print("StepSampler ----")
        print("FORWARD SAMPLING FROM", 0, active_u[0], v, active_values[0])
        samplingpath = SamplingPath(active_u[0], v, active_values[0])
        problem = dict(loglike=loglike, transform=transform, Lmin=Lmin)
        sampler = ClockedStepSampler(ContourSamplingPath(samplingpath, region))
        check_starting_point(sampler, active_u[0], active_values[0], **problem)
        sampler.expand_onestep(fwd=True, **problem)
        sampler.expand_onestep(fwd=True, **problem)
        sampler.expand_onestep(fwd=True, **problem)
        sampler.expand_onestep(fwd=True, **problem)
        sampler.expand_onestep(fwd=False, **problem)
        sampler.expand_to_step(4, **problem)
        sampler.expand_to_step(-4, **problem)
        check_starting_point(sampler, active_u[0], active_values[0], **problem)

        starti, startx, startv, startL = max(sampler.points)

        print()
        print("BACKWARD SAMPLING FROM", starti, startx, startv, startL)
        samplingpath2 = SamplingPath(startx, -startv, startL)
        sampler2 = ClockedStepSampler(
            ContourSamplingPath(samplingpath2, region))
        check_starting_point(sampler2, startx, startL, **problem)
        sampler2.expand_to_step(starti, **problem)
        check_starting_point(sampler2, startx, startL, **problem)

        starti2, startx2, startv2, startL2 = max(sampler2.points)
        assert_allclose(active_u[0], startx2)
        assert_allclose(v, -startv2)

        starti, startx, startv, startL = min(sampler.points)
        print()
        print("BACKWARD SAMPLING FROM", starti, startx, startv, startL)
        samplingpath3 = SamplingPath(startx, startv, startL)
        sampler3 = ClockedStepSampler(
            ContourSamplingPath(samplingpath3, region))
        check_starting_point(sampler3, startx, startL, **problem)
        sampler3.expand_to_step(-starti, **problem)
        check_starting_point(sampler3, startx, startL, **problem)

        starti3, startx3, startv3, startL3 = max(sampler3.points)
        assert_allclose(active_u[0], startx3)
        assert_allclose(v, startv3)
        print()

        print("BisectSampler ----")
        log = dict(log=True)
        print("FORWARD SAMPLING FROM", 0, active_u[0], v, active_values[0])
        samplingpath = SamplingPath(active_u[0], v, active_values[0])
        sampler = ClockedBisectSampler(
            ContourSamplingPath(samplingpath, region), **log)
        check_starting_point(sampler, active_u[0], active_values[0], **problem)
        sampler.expand_to_step(10, **problem)
        check_starting_point(sampler, active_u[0], active_values[0], **problem)

        starti, startx, startv, startL = max(sampler.points)
        print()
        print("BACKWARD SAMPLING FROM", starti, startx, startv, startL)
        samplingpath2 = SamplingPath(startx, -startv, startL)
        sampler2 = ClockedBisectSampler(
            ContourSamplingPath(samplingpath2, region), **log)
        check_starting_point(sampler2, startx, startL, **problem)
        sampler2.expand_to_step(starti, **problem)
        check_starting_point(sampler2, startx, startL, **problem)

        starti2, startx2, startv2, startL2 = max(sampler2.points)
        if gap_free_path(sampler, 0, starti, **problem) and gap_free_path(
                sampler2, 0, starti2, **problem):
            assert_allclose(active_u[0], startx2)
            assert_allclose(v, -startv2)

        starti, startx, startv, startL = min(sampler.points)
        print()
        print("BACKWARD SAMPLING FROM", starti, startx, startv, startL)
        samplingpath3 = SamplingPath(startx, -startv, startL)
        sampler3 = ClockedBisectSampler(
            ContourSamplingPath(samplingpath3, region), **log)
        check_starting_point(sampler3, startx, startL, **problem)
        sampler3.expand_to_step(starti, **problem)
        check_starting_point(sampler3, startx, startL, **problem)

        starti3, startx3, startv3, startL3 = min(sampler3.points)
        if gap_free_path(sampler, 0, starti, **problem) and gap_free_path(
                sampler3, 0, starti3, **problem):
            assert_allclose(active_u[0], startx3)
            assert_allclose(v, -startv3)
        print()

        print("NUTSSampler ----")
        print("FORWARD SAMPLING FROM", 0, active_u[0], v, active_values[0])
        samplingpath = SamplingPath(active_u[0], v, active_values[0])
        np.random.seed(i)
        sampler = ClockedNUTSSampler(ContourSamplingPath(samplingpath, region))
        sampler.get_independent_sample(**problem)
Example #13
def test_ellipsoids():
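    # Check that MLFriends, RobustEllipsoidRegion, SimpleRegion and
    # WrappingEllipsoid all classify their own construction points as inside.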
    tpoints = np.random.uniform(0.4, 0.6, size=(1000, 1))
    tregion = WrappingEllipsoid(tpoints)
    print(tregion.variable_dims)
    tregion.enlarge = tregion.compute_enlargement(nbootstraps=30)
    tregion.create_ellipsoid()

    for umax in 0.6, 0.5:
        print()
        print(umax)
        points = np.random.uniform(0.4, 0.6, size=(1000, 3))
        points = points[points[:, 0] < umax]
        tpoints = points * 10
        tpoints[:, 0] = np.floor(tpoints[:, 0])
        print(points, tpoints)

        transformLayer = AffineLayer(wrapped_dims=[])
        transformLayer.optimize(points, points)

        region = MLFriends(points, transformLayer)
        region.maxradiussq, region.enlarge = region.compute_enlargement(
            nbootstraps=30)
        region.create_ellipsoid()
        inside = region.inside(points)
        assert inside.shape == (len(points), ), (inside.shape, points.shape)
        assert inside.all()

        region = RobustEllipsoidRegion(points, transformLayer)
        region.maxradiussq, region.enlarge = region.compute_enlargement(
            nbootstraps=30)
        region.create_ellipsoid()
        inside = region.inside(points)
        assert inside.shape == (len(points), ), (inside.shape, points.shape)
        assert inside.all()

        region = SimpleRegion(points, transformLayer)
        region.maxradiussq, region.enlarge = region.compute_enlargement(
            nbootstraps=30)
        region.create_ellipsoid()
        inside = region.inside(points)
        assert inside.shape == (len(points), ), (inside.shape, points.shape)
        assert inside.all()

        tregion = WrappingEllipsoid(tpoints)
        print(tregion.variable_dims)
        tregion.enlarge = tregion.compute_enlargement(nbootstraps=30)
        tregion.create_ellipsoid()
        inside = tregion.inside(tpoints)
        assert inside.shape == (len(tpoints), ), (inside.shape, tpoints.shape)
        assert inside.all()
Example #14
def evaluate_warmed_sampler(problemname, ndim, nlive, nsteps, sampler):
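    # Warm up a step sampler on a benchmark problem: periodically rebuild the
    # region as the volume shrinks, replace the worst live point each
    # iteration, and record likelihood thresholds, step sizes and call counts
    # after the warm-up phase.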
    loglike, grad, volume, warmup = get_problem(problemname, ndim=ndim)
    if hasattr(sampler, 'set_gradient'):
        sampler.set_gradient(grad)
    np.random.seed(1)
    def multi_loglike(xs):
        return np.asarray([loglike(x) for x in xs])
    us = np.array([warmup(ndim) for i in range(nlive)])
    Ls = np.array([loglike(u) for u in us])
    vol0 = max((volume(Li, ndim) for Li in Ls))
    nwarmup = 3 * nlive
    
    if ndim > 1:
        transformLayer = AffineLayer()
    else:
        transformLayer = ScalingLayer()
    transformLayer.optimize(us, us)
    region = MLFriends(us, transformLayer)
    region.maxradiussq, region.enlarge = region.compute_enlargement(nbootstraps=30)
    region.create_ellipsoid(minvol=vol0)
    assert region.ellipsoid_center is not None
    sampler.region_changed(Ls, region)
    
    Lsequence = []
    stepsequence = []
    ncalls = 0
    for i in tqdm.trange(nsteps + nwarmup):
        if i % int(nlive * 0.2) == 0:
            minvol = (1 - 1./nlive)**i * vol0
            with warnings.catch_warnings(), np.errstate(all='raise'):
                try:
                    nextTransformLayer = transformLayer.create_new(us, region.maxradiussq, minvol=minvol)
                    nextregion = MLFriends(us, nextTransformLayer)
                    nextregion.maxradiussq, nextregion.enlarge = nextregion.compute_enlargement(nbootstraps=30)
                    if nextregion.estimate_volume() <= region.estimate_volume():
                        nextregion.create_ellipsoid(minvol=minvol)
                        region = nextregion
                        transformLayer = region.transformLayer
                        assert region.ellipsoid_center is not None
                        sampler.region_changed(Ls, region)
                except Warning as w:
                    print("not updating region because: %s" % w)
                except FloatingPointError as e:
                    print("not updating region because: %s" % e)
                except np.linalg.LinAlgError as e:
                    print("not updating region because: %s" % e)
        
        # replace lowest likelihood point
        j = np.argmin(Ls)
        Lmin = float(Ls[j])
        while True:
            u, v, logl, nc = sampler.__next__(region, Lmin, us, Ls, transform, multi_loglike)
            if i > nwarmup:
                ncalls += nc
            if logl is not None:
                assert np.isfinite(u).all(), u
                assert np.isfinite(v).all(), v
                assert np.isfinite(logl), logl
                break
        
        if i > nwarmup:
            Lsequence.append(Lmin)
            stepsequence.append(quantify_step(us[sampler.starti,:], u))

        us[j,:] = u
        Ls[j] = logl
    
    Lsequence = np.asarray(Lsequence)
    return Lsequence, ncalls, np.array(stepsequence)
Example #15
def benchmark_transform():
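    # Benchmark transform/untransform round trips, region.inside() calls and,
    # as a baseline, plain small-array allocations as a function of
    # dimensionality, for both ScalingLayer and AffineLayer.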
    npts = 400
    for layer in 'scale', 'affine':
        print(" ndim | duration  [%s]" % layer)
        tplotpoints = []
        rplotpoints = []
        nplotpoints = []
        for ndim in 2, 4, 8, 16, 32, 64, 128, 256:
            np.random.seed(ndim)
            points = np.random.uniform(0.4, 0.6, size=(npts, ndim))
            if layer == 'scale':
                transformLayer = ScalingLayer()
            else:
                transformLayer = AffineLayer()
            region = MLFriends(points, transformLayer)
            region.maxradiussq, region.enlarge = region.compute_enlargement(
                nbootstraps=30)
            region.create_ellipsoid()

            niter = 0
            total_duration = 0
            while total_duration < .1:
                start = time.time()
                u = region.transformLayer.untransform(
                    np.random.normal(size=(ndim)))
                region.transformLayer.transform(u)
                total_duration += time.time() - start
                niter += 1
            print('%5d | %.2fms ' % (ndim, total_duration * 1000 / niter))
            tplotpoints.append((ndim, total_duration * 1000 / niter))

            niter = 0
            total_duration = 0
            while total_duration < .1:
                u = np.random.normal(0.5, 0.1, size=(10, ndim))
                start = time.time()
                region.inside(u)
                total_duration += time.time() - start
                niter += 1
            print('%5d |          %.2fms ' %
                  (ndim, total_duration * 1000 / niter))
            rplotpoints.append((ndim, total_duration * 1000 / niter))

            niter = 0
            total_duration = 0
            while total_duration < .1:
                u = np.random.normal(0.5, 0.1, size=(10, ndim))
                start = time.time()
                array = np.empty((10), dtype=int)
                array[:] = -1
                array = np.empty((10), dtype=int)
                array[:] = -1
                array = np.empty((10), dtype=int)
                array[:] = -1
                total_duration += time.time() - start
                niter += 1
            print('%5d |                 %.2fms ' %
                  (ndim, total_duration * 1000 / niter))
            nplotpoints.append((ndim, total_duration * 1000 / niter))
        plt.plot(*zip(*tplotpoints), label=layer + ' transform')
        plt.plot(*zip(*rplotpoints), label=layer + ' region.inside')
        plt.plot(*zip(*nplotpoints), label=layer + ' array')

    plt.xlabel('Number of dimensions')
    plt.ylabel('Duration [ms]')
    plt.yscale('log')
    plt.xscale('log')
    plt.legend(loc='best', prop=dict(size=10))
    plt.savefig('testtransform.pdf', bbox_inches='tight')
    plt.close()
Example #16
def prepare_problem(problemname, ndim, nlive, sampler):
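    # Evolve the live points on a benchmark problem until well past warm-up
    # (the region is rebuilt every nlive/5 iterations), then return the final
    # region, live points and likelihoods for later use.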
    loglike, grad, volume, warmup = get_problem(problemname, ndim=ndim)
    if hasattr(sampler, 'set_gradient'):
        sampler.set_gradient(grad)
    np.random.seed(1)
    us = np.random.uniform(size=(nlive, ndim))

    if ndim > 1:
        transformLayer = AffineLayer()
    else:
        transformLayer = ScalingLayer()
    transformLayer.optimize(us, us)
    region = MLFriends(us, transformLayer)
    region.maxradiussq, region.enlarge = region.compute_enlargement(
        nbootstraps=30)
    region.create_ellipsoid(minvol=1.0)

    Ls = np.array([loglike(u) for u in us])
    ncalls = 0
    nok = 0
    i = 0
    while True:
        if i % int(nlive * 0.2) == 0:
            minvol = (1 - 1. / nlive)**i
            nextTransformLayer = transformLayer.create_new(us,
                                                           region.maxradiussq,
                                                           minvol=minvol)
            nextregion = MLFriends(us, nextTransformLayer)
            nextregion.maxradiussq, nextregion.enlarge = nextregion.compute_enlargement(
                nbootstraps=30)
            if nextregion.estimate_volume() <= region.estimate_volume():
                region = nextregion
                transformLayer = region.transformLayer
            region.create_ellipsoid(minvol=minvol)

        # replace lowest likelihood point
        j = np.argmin(Ls)
        Lmin = float(Ls[j])
        while True:
            u, v, logl, nc = sampler.__next__(region, Lmin, us, Ls, transform,
                                              loglike)
            ncalls += nc
            if logl is not None:
                break

        us[j, :] = u
        region.u[j, :] = u
        region.unormed[j, :] = region.transformLayer.transform(u)
        Ls[j] = logl
        i = i + 1
        #print(i, Lmin, volume(Lmin, ndim))
        if np.isfinite(volume(Lmin, ndim)):
            nok += 1

        if nok > 2 * nlive + 1000:
            break
    return region, i, Lmin, us, Ls, transform, loglike