Example #1
import numpy
from kdcount import cluster

def test_cluster():

    pos = numpy.linspace(0, 1, 10000, endpoint=False).reshape(-1, 1)
    dataset = cluster.dataset(pos, boxsize=1.0)

    r = cluster.fof(dataset, 1.1 / len(pos))
    assert r.N == 1
    assert (r.sum() == len(pos)).all()

    r = cluster.fof(dataset, 0.8 / len(pos))
    assert r.N == len(pos)
    assert (r.sum() == 1).all()
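
Why these two thresholds work: the test places 10000 points exactly 1/N apart on a periodic unit line, so any linking length above the spacing chains every point into one group and any below it isolates them all. A minimal standalone check of that geometry (not part of the original test):

N = 10000
spacing = 1.0 / N                     # nearest-neighbour separation of the linspace points
assert 0.8 / N < spacing < 1.1 / N    # the two linking lengths bracket the spacing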
Example #2
import numpy
from numpy.testing import assert_array_equal
from kdcount import cluster

def test_cluster():

    pos = numpy.linspace(0, 1, 10000, endpoint=False).reshape(-1, 1)
    dataset = cluster.dataset(pos, boxsize=1.0)

    r = cluster.fof(dataset, 0.8 / len(pos))
    assert_array_equal(r.N, len(pos))
    assert_array_equal(r.sum(), 1)

    r = cluster.fof(dataset, 1.1 / len(pos))

    assert_array_equal(r.N, 1)
    assert_array_equal(r.sum(), len(pos))
Example #3
import numpy
from numpy.testing import assert_equal, assert_allclose
from kdcount import cluster, correlate, sphere

def test_cluster():
    numpy.random.seed(1234)
    dec = numpy.arcsin(numpy.random.uniform(-1, 1, size=100000)) / numpy.pi * 180
    ra = numpy.random.uniform(0, 2 * numpy.pi, size=100000) / numpy.pi * 180

    # testing bootstrap
    for area, rand in sphere.bootstrap(4, (ra, dec), 41252.96 / len(dec)):
        pass

    dataset = sphere.points(ra, dec)

    r = cluster.fof(dataset, 0.00001, np=None)

    assert r.N == len(dataset)

    binning = sphere.FastAngularBinning(numpy.linspace(0, 1.0, 10))
    binning1 = sphere.AngularBinning(numpy.linspace(0, 1.0, 10))
    binningR = correlate.RBinning(binning.edges)

    r = correlate.paircount(dataset, dataset, binning=binning)
    r1 = correlate.paircount(dataset, dataset, binning=binning1, compute_mean_coords=True)

    r2 = correlate.paircount(dataset, dataset, binning=binningR)

    # make sure mean_centers computes the angular bin centers
    for i, val in enumerate(r1.mean_centers):
        assert binning.angular_edges[i] < val < binning.angular_edges[i+1]
    assert_equal(r1.sum1, r2.sum1)
    assert_equal(r1.sum1, r.sum1)
    assert_allclose(
        r.sum1,
        numpy.diff(2 * numpy.pi * (1 - numpy.cos(numpy.radians(binning.angular_edges))))
        / (4 * numpy.pi) * len(ra) ** 2,
        rtol=1e-1)
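
The final assert is a closure test: for N points uniform on the sphere, the expected pair count in an angular bin is N² times the fraction of the sphere covered by the annulus between the bin edges. A standalone sketch of that expectation, assuming (as the numpy.radians call above suggests) that binning.angular_edges is in degrees; the chord line is the usual mapping between an angle and a unit-sphere chord:

import numpy

N = 100000
angular_edges = numpy.linspace(0, 1.0, 10)            # degrees, as above
theta = numpy.radians(angular_edges)
cap = 2 * numpy.pi * (1 - numpy.cos(theta))           # solid angle of the cap within theta
expected = numpy.diff(cap) / (4 * numpy.pi) * N ** 2  # expected pair count per bin
chord = 2 * numpy.sin(theta / 2)                      # unit-sphere chord for angle theta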
Example #4
import numpy
from kdcount import cluster

def test_cluster_empty():

    pos = numpy.empty((0, 3))
    dataset = cluster.dataset(pos, boxsize=1.0)

    # no error shall be raised
    r = cluster.fof(dataset, 0.8)
Example #5
import numpy
from numpy.testing import assert_equal, assert_allclose
from kdcount import cluster, correlate, sphere

def test_cluster():
    numpy.random.seed(1234)
    dec = numpy.arcsin(numpy.random.uniform(-1, 1, size=100000)) / numpy.pi * 180
    ra = numpy.random.uniform(0, 2 * numpy.pi, size=100000) / numpy.pi * 180

    # testing bootstrap
    for area, rand in sphere.bootstrap(4, (ra, dec), 41252.96 / len(dec)):
        pass

    dataset = sphere.points(ra, dec)

    r = cluster.fof(dataset, 0.00001, np=None)

    assert r.N == len(dataset)

    binning = sphere.AngularBinning(numpy.linspace(0, 1.0, 10))
    binningR = correlate.RBinning(binning.edges)

    r = correlate.paircount(dataset, dataset, binning=binning, usefast=True)
    r1 = correlate.paircount(dataset, dataset, binning=binning, usefast=False)

    r2 = correlate.paircount(dataset, dataset, binning=binningR, usefast=True)

    assert_equal(r1.sum1, r2.sum1)
    assert_equal(r1.sum1, r.sum1)
    assert_allclose(
        r.sum1,
        numpy.diff(2 * numpy.pi * (1 - numpy.cos(numpy.radians(binning.angular_edges))))
        / (4 * numpy.pi) * len(ra) ** 2,
        rtol=1e-1)
Example #6
import numpy
from kdcount import cluster
# `so` is a project-level helper (apparently a spherical-overdensity
# radius finder, cf. the R200/R1200/... fields) defined elsewhere

def subfof(pos, vel, ll, vfactor, haloid, Ntot):
    # wrap displacements from the first particle into [-0.5, 0.5)
    # (periodic unit box), then shift back
    first = pos[0].copy()
    pos -= first
    pos[pos > 0.5] -= 1.0
    pos[pos < -0.5] += 1.0
    pos += first

    oldvel = vel.copy()
    vmean = vel.mean(axis=0, dtype='f8')
    vel -= vmean
    sigma_1d = (vel ** 2).mean(dtype='f8') ** 0.5
    vel /= (vfactor * sigma_1d)
    vel *= ll
    # 6-d phase-space coordinates: positions plus rescaled velocities
    data = numpy.concatenate((pos, vel), axis=1)
    #data = pos

    data = cluster.dataset(data)
    Nsub = 0
    while Nsub == 0:
        fof = cluster.fof(data, linking_length=ll, np=0)
        Nsub = (fof.length > 20).sum()
        if Nsub == 0:
            # nothing found: double the linking length and retry, so the
            # recorded LinkingLength matches the run that succeeded
            ll *= 2

    output = numpy.empty(Nsub, dtype=[
        ('Position', ('f4', 3)),
        ('Velocity', ('f4', 3)),
        ('LinkingLength', 'f4'),
        ('R200', 'f4'),
        ('R1200', 'f4'),
        ('R2400', 'f4'),
        ('R6000', 'f4'),
        ('Length', 'i4'),
        ('HaloID', 'i4'),
        ])

    output['Position'][...] = fof.center()[:Nsub, :3]
    output['Length'][...] = fof.length[:Nsub]
    output['HaloID'][...] = haloid
    output['LinkingLength'][...] = ll

    for i in range(3):
        output['Velocity'][..., i] = fof.sum(oldvel[:, i])[:Nsub] / output['Length']

    del fof
    del data
    data = cluster.dataset(pos)
    for i in range(Nsub):
        center = output['Position'][i] 
        r1 = (1.0 * output['Length'][i] / Ntot) ** 0.3333 * 3
        output['R200'][i] = so(center, data, r1, Ntot, 200.)
        output['R1200'][i] = so(center, data, output['R200'][i] * 0.5, Ntot, 1200.)
        output['R2400'][i] = so(center, data, output['R1200'][i] * 0.5, Ntot, 2400.)
        output['R6000'][i] = so(center, data, output['R2400'][i] * 0.5, Ntot, 6000.)
    return output
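
so() is not shown in this snippet; judging from the field names it finds the radius at which the mean enclosed density crosses a given overdensity. A hypothetical stand-in, purely to illustrate the idea (bracket the target density, then bisect); it ignores periodic wrapping and is not the project's implementation:

import numpy

def so_radius(center, pos, rstart, nbar, overdensity, maxiter=64):
    # mean enclosed number density within radius r around center
    def mean_density(r):
        n = (((pos - center) ** 2).sum(axis=-1) < r * r).sum()
        return n / (4.0 / 3 * numpy.pi * r ** 3)

    target = overdensity * nbar
    lo = hi = rstart
    # expand hi until the enclosed density falls below the target ...
    while mean_density(hi) > target:
        hi *= 2
    # ... and shrink lo until the enclosed density exceeds it
    while mean_density(lo) <= target and lo > 1e-7:
        lo *= 0.5
    # bisect for the crossing radius
    for _ in range(maxiter):
        mid = 0.5 * (lo + hi)
        if mean_density(mid) > target:
            lo = mid
        else:
            hi = mid
    return 0.5 * (lo + hi)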
Example #7
def local_fof(layout, pos, boxsize, ll, comm):
    from kdcount import cluster

    N = len(pos)

    pos = layout.exchange(pos)
    data = cluster.dataset(pos, boxsize=boxsize)
    fof = cluster.fof(data, linking_length=ll, np=0)
    labels = fof.labels
    del fof

    # global particle IDs: offset the local arange by the particle
    # counts on all lower ranks
    PID = numpy.arange(N, dtype='intp')
    PID += sum(comm.allgather(N)[:comm.rank])

    PID = layout.exchange(PID)
    # initialize global labels
    minid = equiv_class(labels, PID, op=numpy.fmin)[labels]

    return minid
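
equiv_class() comes from the surrounding module; the usage here only relies on it returning, per label, the reduction of the paired values. A sketch of those assumed semantics for op=numpy.fmin, using numpy's unbuffered ufunc.at; indexing the result with labels then broadcasts each group's minimum particle ID back to every member, which is what seeds the global labels:

import numpy

def equiv_class_sketch(labels, values, op=numpy.fmin):
    # assumed semantics: out[k] == op.reduce(values[labels == k])
    out = numpy.full(labels.max() + 1, numpy.iinfo(values.dtype).max,
                     dtype=values.dtype)
    op.at(out, labels, values)   # unbuffered per-label reduction
    return out

# minid_sketch = equiv_class_sketch(labels, PID)[labels]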
Example #8
File: fof.py Project: bccp/nbodykit
def _fof_local(layout, pos, boxsize, ll, comm):
    from kdcount import cluster

    N = len(pos)

    pos = layout.exchange(pos)
    if boxsize is not None:
        pos %= boxsize
    data = cluster.dataset(pos, boxsize=boxsize)
    fof = cluster.fof(data, linking_length=ll, np=0)
    labels = fof.labels
    del fof

    PID = numpy.arange(N, dtype='intp')
    PID += numpy.sum(comm.allgather(N)[:comm.rank], dtype='intp')

    PID = layout.exchange(PID)
    # initialize global labels
    minid = equiv_class(labels, PID, op=numpy.fmin)[labels]

    return minid
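
The only difference from Example #7 is the pos %= boxsize wrap before building the dataset: exchanged positions can land outside the box after ghost-zone padding, and the wrap suggests the periodic kd-tree expects coordinates inside [0, boxsize). In isolation:

import numpy

pos = numpy.array([[1.2, -0.1, 0.5]])
boxsize = 1.0
pos %= boxsize   # -> approximately [[0.2, 0.9, 0.5]]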
Example #9
def main():
    comm = MPI.COMM_WORLD
    np = split_size_2d(comm.size)

    grid = [
        numpy.linspace(0, 1.0, np[0] + 1, endpoint=True),
        numpy.linspace(0, 1.0, np[1] + 1, endpoint=True),
    ]
    domain = GridND(grid)
    if comm.rank == 0:
        logging.info('grid %s' % str(grid))

    [P] = read(comm, ns.filename, TPMSnapshotFile, columns=['Position', 'ID'])

    tpos = P['Position']
    tid = P['ID']
    del P

    Ntot = sum(comm.allgather(len(tpos)))

    if comm.rank == 0:
        logging.info('Total number of particles %d, ll %g' % (Ntot, ns.LinkingLength))
    ll = ns.LinkingLength * Ntot ** -0.3333333

    #print pos
    #print ((pos[0] - pos[1]) ** 2).sum()** 0.5, ll
  
    layout = domain.decompose(tpos, smoothing=ll * 1)

    tpos = layout.exchange(tpos)
    tid = layout.exchange(tid)

    logging.info('domain %d has %d particles' % (comm.rank, len(tid)))

    data = cluster.dataset(tpos, boxsize=1.0)
    fof = cluster.fof(data, linking_length=ll, np=0, verbose=True)
    
    # initialize global labels
    minid = equiv_class(fof.labels, tid, op=numpy.fmin)[fof.labels]
    del fof
    del data
    del tpos
    del tid

    while True:
        # merge, if a particle belongs to several ranks
        # use the global label of the minimal
        minid_new = layout.gather(minid, mode=numpy.fmin)
        minid_new = layout.exchange(minid_new)

        # on my rank, these particles have been merged
        merged = minid_new != minid
        # if no rank has merged any, we are done;
        # minid is then the global label (albeit with some holes)
        total = comm.allreduce(merged.sum())
            
        if comm.rank == 0:
            print('merged', total, 'halos')

        if total == 0:
            del minid_new
            break
        old = minid[merged]
        new = minid_new[merged]
        arg = old.argsort()
        new = new[arg]
        old = old[arg]
        replacesorted(minid, old, new, out=minid)

    minid = layout.gather(minid, mode=numpy.fmin)

    label = assign_halo_label(minid, comm, thresh=ns.nmin) 

    N = halos.count(label, comm=comm)

    if comm.rank == 0:
        print('total halos is', len(N))

    [P] = read(comm, ns.filename, TPMSnapshotFile, columns=['Position'])

    hpos = halos.centerofmass(label, P['Position'], boxsize=1.0, comm=comm)

    [P] = read(comm, ns.filename, TPMSnapshotFile, columns=['Velocity'])

    hvel = halos.centerofmass(label, P['Velocity'], boxsize=None, comm=comm)

    if comm.rank == 0:
        print(N)
        print('total groups', N.shape)
        print('total particles', N.sum())
        print('above', ns.nmin, (N > ns.nmin).sum())
        N[0] = -1
        # binary halo catalogue: count, linking length, lengths, positions, velocities
        with open(ns.output + '.halo', 'wb') as ff:
            numpy.int32(len(N)).tofile(ff)
            numpy.float32(ns.LinkingLength).tofile(ff)
            numpy.int32(N).tofile(ff)
            numpy.float32(hpos).tofile(ff)
            numpy.float32(hvel).tofile(ff)
        print(hpos)
    del N
    del hpos

    npart = None
    if comm.rank == 0:
        snapshot = Snapshot(ns.filename, TPMSnapshotFile)
        npart = snapshot.npart
        for i in range(len(snapshot.npart)):
            # write each file's 8-byte header now; labels are filled in below
            with open(ns.output + '.grp.%02d' % i, 'wb') as ff:
                numpy.int32(npart[i]).tofile(ff)
                numpy.float32(ns.LinkingLength).tofile(ff)
    npart = comm.bcast(npart)

    start = sum(comm.allgather(len(label))[:comm.rank])
    end = sum(comm.allgather(len(label))[:comm.rank+1])
    label = numpy.int32(label)
    written = 0
    for i in range(len(npart)):
        filestart = sum(npart[:i])
        fileend = sum(npart[:i + 1])
        mystart = start - filestart
        myend = end - filestart
        if myend <= 0: continue
        if mystart >= npart[i]: continue
        if myend > npart[i]: myend = npart[i]
        if mystart < 0: mystart = 0
        with open(ns.output + '.grp.%02d' % i, 'r+b') as ff:
            # skip the 8-byte header, then the labels of lower ranks
            ff.seek(8, 0)
            ff.seek(mystart * 4, 1)
            label[written:written + myend - mystart].tofile(ff)
        written += myend - mystart

    return
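
replacesorted() is an external helper; the merge loop above only needs it to rewrite, in place, every occurrence of old[i] in minid with new[i], given old sorted ascending. A sketch of those assumed semantics with numpy.searchsorted:

import numpy

def replacesorted_sketch(arr, old, new, out=None):
    # assumed semantics: wherever arr contains old[i] (old sorted
    # ascending), write new[i]; other values pass through unchanged
    if out is None:
        out = arr.copy()
    idx = numpy.searchsorted(old, arr)
    idx = numpy.clip(idx, 0, len(old) - 1)
    hit = old[idx] == arr
    out[hit] = new[idx[hit]]
    return out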
Example #10
from kdcount import cluster

def local_fof(pos, ll):
    data = cluster.dataset(pos, boxsize=1.0)
    fof = cluster.fof(data, linking_length=ll, np=0, verbose=True)
    labels = fof.labels
    return labels
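
A hypothetical call, just to show the expected shapes: positions in the unit periodic box, with the linking length set to 0.2 times the mean interparticle spacing:

import numpy

pos = numpy.random.uniform(size=(1000, 3))
labels = local_fof(pos, ll=0.2 * len(pos) ** (-1.0 / 3))
assert len(labels) == len(pos)   # one group label per input point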
Example #11
import numpy
from kdcount import cluster
# `so` is a project-level helper (apparently a spherical-overdensity
# radius finder) defined elsewhere

def subfof(pos, vel, ll, vfactor, haloid, Ntot, boxsize):
    nbar = Ntot / boxsize.prod()
    first = pos[0].copy()
    pos -= first
    pos /= boxsize
    # wrap displacements into [-0.5, 0.5) per dimension (periodic box),
    # then undo the scaling and the shift
    pos[pos > 0.5] -= 1.0
    pos[pos < -0.5] += 1.0
    pos *= boxsize
    pos += first

    oldvel = vel.copy()
    vmean = vel.mean(axis=0, dtype='f8')
    vel -= vmean
    sigma_1d = (vel ** 2).mean(dtype='f8') ** 0.5
    vel /= (vfactor * sigma_1d)
    vel *= ll
    # 6-d phase-space coordinates: positions plus rescaled velocities
    data = numpy.concatenate((pos, vel), axis=1)
    #data = pos

    data = cluster.dataset(data)
    Nsub = 0
    thresh = 80
    fof = cluster.fof(data, linking_length=ll, np=0)

    while Nsub == 0 and thresh > 1:
        # lower the threshold until at least one subgroup survives
        Nsub = (fof.length > thresh).sum()
        thresh *= 0.9
    # if nothing is found, assume this FOF group is a fluke
    # (Nsub stays 0 and the output array below is empty)

    output = numpy.empty(Nsub, dtype=[
        ('Position', ('f4', 3)),
        ('Velocity', ('f4', 3)),
        ('LinkingLength', 'f4'),
        ('R200', 'f4'),
        ('R500', 'f4'),
        ('R1200', 'f4'),
        ('R2400', 'f4'),
        ('R6000', 'f4'),
        ('Length', 'i4'),
        ('HaloID', 'i4'),
        ])

    output['Position'][...] = fof.center()[:Nsub, :3]
    output['Length'][...] = fof.length[:Nsub]
    output['HaloID'][...] = haloid
    output['LinkingLength'][...] = ll

    for i in range(3):
        output['Velocity'][..., i] = fof.sum(oldvel[:, i])[:Nsub] / output['Length']

    del fof
    del data
    data = cluster.dataset(pos)

    for i in range(Nsub):
        center = output['Position'][i] 
        rmax = (((pos - center) ** 2).sum(axis=-1) ** 0.5).max()
        r1 = rmax
        output['R200'][i] = so(center, data, r1, nbar, 200.)
        output['R500'][i] = so(center, data, r1, nbar, 500.)
        output['R1200'][i] = so(center, data, output['R200'][i] * 0.5, nbar, 1200.)
        output['R2400'][i] = so(center, data, output['R1200'][i] * 0.5, nbar, 2400.)
        output['R6000'][i] = so(center, data, output['R2400'][i] * 0.5, nbar, 6000.)
    # order by group length (position breaks ties), largest first
    output.sort(order=['Length', 'Position'])
    print(output)
    output = output[::-1]
    return output
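
The phase-space trick at the top of subfof in isolation: velocities are recentered and rescaled so that vfactor 1-d dispersions map onto one linking length, letting a single Euclidean FOF act on the concatenated 6-d (position, velocity) coordinates. A standalone sketch with synthetic data:

import numpy

numpy.random.seed(0)
pos = numpy.random.uniform(size=(100, 3))
vel = numpy.random.normal(size=(100, 3))
ll, vfactor = 0.05, 2.0

vel = vel - vel.mean(axis=0, dtype='f8')       # recenter
sigma_1d = (vel ** 2).mean(dtype='f8') ** 0.5  # 1-d velocity dispersion
vel = vel / (vfactor * sigma_1d) * ll          # vfactor sigmas -> one linking length
data6d = numpy.concatenate((pos, vel), axis=1) # 6-d FOF input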