Example #1
0
def subfof(pos, vel, ll, vfactor, haloid, Ntot):
    """Phase-space friends-of-friends on the particles of one FOF halo.

    Positions are unwrapped around the first particle (a unit periodic
    box is assumed here -- TODO confirm against the caller), velocities
    are rescaled by their 1-d dispersion so a single linking length
    ``ll`` works in the joint (position, velocity) space.  The linking
    length is doubled until at least one subgroup with more than 20
    members is found; spherical-overdensity radii are then measured
    around each subgroup center.

    Parameters
    ----------
    pos, vel : (N, 3) float arrays; both are modified in place.
    ll : initial linking length.
    vfactor : velocity scale factor, in units of sigma_1d per ll.
    haloid : id written into every output row.
    Ntot : total particle count; forwarded to ``so`` as the density
        normalisation (equals nbar in a unit box -- TODO confirm).

    Returns
    -------
    Structured array with one row per subgroup found.
    """
    # Unwrap periodic images about the first particle so the group is
    # spatially contiguous (box size 1.0 assumed).
    first = pos[0].copy()
    pos -= first
    pos[pos > 0.5]  -= 1.0
    pos[pos < -0.5] += 1.0
    pos += first

    # Keep raw velocities: the output 'Velocity' is their per-group mean.
    oldvel = vel.copy()
    vmean = vel.mean(axis=0, dtype='f8')
    vel -= vmean
    sigma_1d = (vel ** 2).mean(dtype='f8') ** 0.5
    # After this scaling, vfactor * sigma_1d corresponds to one ll.
    vel /= (vfactor * sigma_1d)
    vel *= ll
    data = numpy.concatenate((pos, vel), axis=1)

    data = cluster.dataset(data)
    Nsub = 0
    while Nsub == 0:
        fof = cluster.fof(data, linking_length=ll, np=0)
        Nsub = (fof.length > 20).sum()
        if Nsub == 0:
            # Nothing found yet: relax the linking length and retry.
            # (Previously ll was doubled unconditionally *before* the
            # check, so the recorded 'LinkingLength' below was twice the
            # value that actually produced the subgroups.)
            ll *= 2

    output = numpy.empty(Nsub, dtype=[
        ('Position', ('f4', 3)),
        ('Velocity', ('f4', 3)),
        ('LinkingLength', 'f4'),
        ('R200', 'f4'),
        ('R1200', 'f4'),
        ('R2400', 'f4'),
        ('R6000', 'f4'),
        ('Length', 'i4'),
        ('HaloID', 'i4'),
        ])

    # The [:Nsub] slicing assumes fof orders groups by decreasing length
    # -- matches kdcount's convention, verify against the library version.
    output['Position'][...] = fof.center()[:Nsub, :3]
    output['Length'][...] = fof.length[:Nsub]
    output['HaloID'][...] = haloid
    # The linking length that actually produced these subgroups.
    output['LinkingLength'][...] = ll

    for i in range(3):
        # Per-subgroup mean of the *unscaled* velocities.
        output['Velocity'][..., i] = fof.sum(oldvel[:, i])[:Nsub] / output['Length']

    del fof
    del data
    # Overdensity radii are measured in position space only.
    data = cluster.dataset(pos)
    for i in range(Nsub):
        center = output['Position'][i]
        # Initial guess: radius enclosing Length particles at the mean
        # density, inflated by a factor of 3.
        r1 = (1.0 * output['Length'][i] / Ntot) ** 0.3333 * 3
        output['R200'][i] = so(center, data, r1, Ntot, 200.)
        # Each denser threshold starts from half the previous radius.
        output['R1200'][i] = so(center, data, output['R200'][i] * 0.5, Ntot, 1200.)
        output['R2400'][i] = so(center, data, output['R1200'][i] * 0.5, Ntot, 2400.)
        output['R6000'][i] = so(center, data, output['R2400'][i] * 0.5, Ntot, 6000.)
    return output
Example #2
0
def so(center, data, r1, nbar, thresh=200):
    """Find the spherical-overdensity radius around ``center``.

    Grows/shrinks a pair of radii until they bracket the target
    overdensity, then bisects in log space until delta(r) is within 0.1
    of ``thresh``.

    Parameters
    ----------
    center : sequence of 3 floats, the search center.
    data : cluster.dataset of particle positions.
    r1 : starting radius for the bracket search.
    nbar : mean particle density used to normalise delta.
    thresh : target overdensity (e.g. 200. for R200).

    Returns
    -------
    The radius r with |delta(r) - thresh| <= 0.1, or numpy.nan when the
    shrinking radius collapses without ever exceeding the threshold
    (no sufficiently overdense region around this center).
    """
    center = numpy.array([center])
    dcenter = cluster.dataset(center)

    def delta(r):
        # Mean overdensity of the sphere of radius r around the center.
        if r == 0:
            return numpy.nan
        N = data.tree.count(dcenter.tree, [r])[0][0]
        # 4. / 3. (not 4 / 3): under Python 2 the integer division made
        # this factor 1, mis-normalising the sphere volume.  The sibling
        # implementation below already carries this fix.
        n = N / (4. / 3. * numpy.pi * r ** 3)
        return 1.0 * n / nbar - 1

    d1 = delta(r1)
    # Grow r1 until the enclosed overdensity falls below the threshold.
    while d1 > thresh:
        r1 *= 1.4
        d1 = delta(r1)
    # delta(r1) < thresh; shrink a second radius until it exceeds it.
    r2 = r1
    d2 = d1
    while d2 < thresh:
        r2 *= 0.7
        if r2 < 1e-7:
            # Collapsed to (near) zero without bracketing the threshold:
            # there is no overdense region here.  (Previously this loop
            # spun for thousands of iterations until r underflowed to 0.)
            return numpy.nan
        d2 = delta(r2)
    # delta(r2) > thresh: bisect in log space between the brackets.
    while True:
        r = (r1 * r2) ** 0.5
        d = delta(r)
        x = (d - thresh)
        if x > 0.1:
            r2 = r
        elif x < -0.1:
            r1 = r
        else:
            return r
Example #3
0
def test_cluster_empty():
    """FOF on a dataset with zero particles must complete without raising."""
    empty = numpy.empty((0, 3))
    ds = cluster.dataset(empty, boxsize=1.0)
    # Success criterion is simply that this call returns.
    result = cluster.fof(ds, 0.8)
Example #4
0
def test_cluster_empty():
    """Running FOF over an empty particle set should not raise."""
    no_points = numpy.empty((0, 3))
    periodic = cluster.dataset(no_points, boxsize=1.0)
    # The absence of an exception is the whole test.
    outcome = cluster.fof(periodic, 0.8)
Example #5
0
def test_cluster():
    """Evenly spaced points on a periodic unit interval.

    A linking length above the spacing links everything into one group;
    one below the spacing leaves every point isolated.
    """
    count = 10000
    pos = numpy.linspace(0, 1, count, endpoint=False).reshape(-1, 1)
    ds = cluster.dataset(pos, boxsize=1.0)

    percolated = cluster.fof(ds, 1.1 / count)
    assert percolated.N == 1
    assert (percolated.sum() == count).all()

    isolated = cluster.fof(ds, 0.8 / count)
    assert isolated.N == count
    assert (isolated.sum() == 1).all()
Example #6
0
def test_cluster():
    """Below-spacing linking length isolates every point; above-spacing
    merges all of them into a single group."""
    n = 10000
    positions = numpy.linspace(0, 1, n, endpoint=False).reshape(-1, 1)
    ds = cluster.dataset(positions, boxsize=1.0)

    # Linking length shorter than the spacing: n singleton groups.
    result = cluster.fof(ds, 0.8 / n)
    assert_array_equal(result.N, n)
    assert_array_equal(result.sum(), 1)

    # Linking length longer than the spacing: one group of n points.
    result = cluster.fof(ds, 1.1 / n)

    assert_array_equal(result.N, 1)
    assert_array_equal(result.sum(), n)
Example #7
0
def test_cluster():
    """Regular 1-d lattice on a periodic unit box.

    The group structure flips between all-singletons and one giant group
    as the linking length crosses the lattice spacing.
    """
    total = 10000
    pts = numpy.linspace(0, 1, total, endpoint=False).reshape(-1, 1)
    lattice = cluster.dataset(pts, boxsize=1.0)

    below = cluster.fof(lattice, 0.8 / total)
    assert_array_equal(below.N, total)
    assert_array_equal(below.sum(), 1)

    above = cluster.fof(lattice, 1.1 / total)
    assert_array_equal(above.N, 1)
    assert_array_equal(above.sum(), total)
Example #8
0
def local_fof(layout, pos, boxsize, ll, comm):
    """Rank-local stage of the distributed friends-of-friends.

    Exchanges positions across ranks per ``layout``, runs FOF on the
    local (ghosted) particles, and seeds every particle with a global
    label: the smallest global particle id in its local group.

    Parameters
    ----------
    layout : domain-decomposition exchange object.
    pos : (N, 3) positions local to this rank, before exchange.
    boxsize : periodic box size, or None for a non-periodic dataset.
    ll : linking length.
    comm : MPI communicator.

    Returns the per-(exchanged-)particle minimal global id; the caller
    merges these labels across ranks.
    """
    N = len(pos)

    pos = layout.exchange(pos)
    # Wrap positions into the periodic box before building the tree --
    # out-of-box positions break the periodic FOF.  This matches the
    # corrected _fof_local variants elsewhere in this file.
    if boxsize is not None:
        pos %= boxsize
    data = cluster.dataset(pos, boxsize=boxsize)
    fof = cluster.fof(data, linking_length=ll, np=0)
    labels = fof.labels
    del fof

    # Globally unique ids: local index offset by the particle counts of
    # all lower ranks.
    PID = numpy.arange(N, dtype='intp')
    PID += sum(comm.allgather(N)[:comm.rank])

    PID = layout.exchange(PID)
    # initialize global labels
    minid = equiv_class(labels, PID, op=numpy.fmin)[labels]

    return minid
Example #9
0
def local_fof(layout, pos, boxsize, ll, comm):
    """Label this rank's (exchanged) particles by FOF group and return,
    per particle, the minimum global particle id of its group.

    ``boxsize`` may be None for non-periodic data; otherwise positions
    are wrapped into the box before the tree is built (the wrap was
    missing here but present in the newer _fof_local versions).
    """
    N = len(pos)

    pos = layout.exchange(pos)
    # Keep positions inside the periodic box: periodic FOF requires it.
    if boxsize is not None:
        pos %= boxsize
    data = cluster.dataset(pos, boxsize=boxsize)
    fof = cluster.fof(data, linking_length=ll, np=0)
    labels = fof.labels
    del fof

    # Global particle ids start after all particles held by lower ranks.
    PID = numpy.arange(N, dtype='intp')
    PID += sum(comm.allgather(N)[:comm.rank])

    PID = layout.exchange(PID)
    # initialize global labels
    minid = equiv_class(labels, PID, op=numpy.fmin)[labels]

    return minid
Example #10
0
File: fof.py  Project: twobombs/nbodykit
def _fof_local(layout, pos, boxsize, ll, comm):
    """Local FOF stage: label ghosted particles and seed global ids.

    Each particle receives the smallest global particle id found in its
    rank-local FOF group; the caller reconciles labels across ranks.
    """
    from kdcount import cluster

    nlocal = len(pos)

    pos = layout.exchange(pos)
    if boxsize is not None:
        # keep positions inside the periodic box
        pos %= boxsize

    fof = cluster.fof(cluster.dataset(pos, boxsize=boxsize),
                      linking_length=ll, np=0)
    labels = fof.labels
    del fof

    # Global particle ids: this rank's particles are numbered after all
    # particles held by lower ranks.
    offset = numpy.sum(comm.allgather(nlocal)[:comm.rank], dtype='intp')
    PID = layout.exchange(numpy.arange(nlocal, dtype='intp') + offset)

    # seed each particle with the minimal global id of its local group
    return equiv_class(labels, PID, op=numpy.fmin)[labels]
Example #11
0
File: fof.py  Project: bccp/nbodykit
def _fof_local(layout, pos, boxsize, ll, comm):
    """Run friends-of-friends on this rank's exchanged particles and
    return, per particle, the minimum global particle id of its group."""
    from kdcount import cluster

    count = len(pos)

    exchanged = layout.exchange(pos)
    if boxsize is not None:
        # wrap into the periodic box before building the tree
        exchanged %= boxsize
    data = cluster.dataset(exchanged, boxsize=boxsize)
    group_labels = cluster.fof(data, linking_length=ll, np=0).labels

    # Build globally unique ids, offset by the particle counts of all
    # lower ranks, then route them through the same exchange as positions.
    rank_offset = numpy.sum(comm.allgather(count)[:comm.rank], dtype='intp')
    global_ids = numpy.arange(count, dtype='intp')
    global_ids += rank_offset
    global_ids = layout.exchange(global_ids)

    # initialize global labels from the per-group minimum id
    minid = equiv_class(group_labels, global_ids, op=numpy.fmin)[group_labels]
    return minid
Example #12
0
def so(center, data, r1, nbar, thresh=200):
    """Spherical-overdensity radius around ``center``.

    Brackets the target overdensity ``thresh`` by growing/shrinking the
    search radius, then bisects in log space until delta(r) is within
    0.1 of the threshold.

    Parameters
    ----------
    center : sequence of 3 floats, the search center.
    data : cluster.dataset of particle positions.
    r1 : starting radius.
    nbar : mean particle density normalising delta.
    thresh : target overdensity (e.g. 200. for R200).

    Returns the matching radius, or numpy.nan when the radius collapses
    below 1e-7 without bracketing the threshold (no overdense region
    around this center).
    """
    center = numpy.array([center])
    dcenter = cluster.dataset(center)

    # Dedicated control-flow exception.  The original raised
    # StopIteration, which is reserved for the iterator protocol and is
    # easy to swallow by accident (cf. PEP 479); a private exception
    # cannot be masked that way.  Caller-visible behavior is unchanged.
    class _RadiusCollapsed(Exception):
        pass

    def delta(r):
        # Mean overdensity of the sphere of radius r.
        if r < 1e-7:
            raise _RadiusCollapsed
        N = data.tree.root.count(dcenter.tree.root, r)
        n = N / (4. / 3. * numpy.pi * r ** 3)
        return 1.0 * n / nbar - 1

    try:
        d1 = delta(r1)
        # Grow until the enclosed overdensity drops below the threshold.
        while d1 > thresh:
            r1 *= 1.4
            d1 = delta(r1)
        # delta(r1) < thresh; shrink r2 until delta(r2) > thresh.
        r2 = r1
        d2 = d1
        while d2 < thresh:
            r2 *= 0.7
            d2 = delta(r2)

        # Bisect in log space between the bracketing radii.
        while True:
            r = (r1 * r2) ** 0.5
            d = delta(r)
            x = (d - thresh)
            if x > 0.1:
                r2 = r
            elif x < -0.1:
                r1 = r
            else:
                return r

    except _RadiusCollapsed:
        return numpy.nan
Example #13
0
def main():
    """Driver for a distributed friends-of-friends halo finder.

    Reads a TPM snapshot, runs a rank-local FOF, iteratively merges
    group labels across ranks until no label changes, then writes a halo
    catalog ('.halo') and per-particle group-label files ('.grp.NN').

    NOTE(review): this block uses Python 2 ``print`` statements while
    other examples in this file use Python 3 ``print()`` -- it predates
    them.  Relies on module-level ``ns`` (parsed CLI options), ``read``,
    ``halos``, ``equiv_class``, ``replacesorted`` etc. defined elsewhere.
    """
    comm = MPI.COMM_WORLD
    # Factor the communicator size into a 2-d process grid.
    # (Local name `np` is the process grid, NOT numpy -- numpy is
    # referred to as `numpy` throughout.)
    np = split_size_2d(comm.size)

    # Uniform 2-d domain grid over the unit box.
    grid = [
        numpy.linspace(0, 1.0, np[0] + 1, endpoint=True),
        numpy.linspace(0, 1.0, np[1] + 1, endpoint=True),
    ]
    domain = GridND(grid)
    if comm.rank == 0:
        logging.info('grid %s' % str(grid) )

    [P] = read(comm, ns.filename, TPMSnapshotFile, columns=['Position', 'ID'])

    tpos = P['Position']
    tid = P['ID']
    del P

    Ntot = sum(comm.allgather(len(tpos)))

    if comm.rank == 0:
        logging.info('Total number of particles %d, ll %g' % (Ntot, ns.LinkingLength))
    # Linking length is specified in units of the mean inter-particle
    # separation (Ntot^(-1/3) in a unit box).
    ll = ns.LinkingLength * Ntot ** -0.3333333

    #print pos
    #print ((pos[0] - pos[1]) ** 2).sum()** 0.5, ll
  
    # Decompose with ghost zones one linking length wide so groups that
    # straddle domain edges are seen by both ranks.
    layout = domain.decompose(tpos, smoothing=ll * 1)

    tpos = layout.exchange(tpos)
    tid = layout.exchange(tid)

    logging.info('domain %d has %d particles' % (comm.rank, len(tid)))

    data = cluster.dataset(tpos, boxsize=1.0)
    fof = cluster.fof(data, linking_length=ll, np=0, verbose=True)
    
    # initialize global labels
    minid = equiv_class(fof.labels, tid, op=numpy.fmin)[fof.labels]
    del fof
    del data
    del tpos
    del tid

    # Iteratively propagate the minimum global id across ranks until no
    # particle's label changes anywhere (fixed-point iteration).
    while True:
        # merge, if a particle belongs to several ranks
        # use the global label of the minimal
        minid_new = layout.gather(minid, mode=numpy.fmin)
        minid_new = layout.exchange(minid_new)

        # on my rank, these particles have been merged
        merged = minid_new != minid
        # if no rank has merged any, we are done
        # gl is the global label (albeit with some holes)
        total = comm.allreduce(merged.sum())
            
        if comm.rank == 0:
            print 'merged ', total, 'halos'

        if total == 0:
            del minid_new
            break
        # Rewrite every occurrence of an old label with its new, smaller
        # label (replacesorted requires the keys sorted).
        old = minid[merged]
        new = minid_new[merged]
        arg = old.argsort()
        new = new[arg]
        old = old[arg]
        replacesorted(minid, old, new, out=minid)

    minid = layout.gather(minid, mode=numpy.fmin)

    # Compress the sparse min-id labels into dense halo labels, dropping
    # groups below the ns.nmin membership threshold.
    label = assign_halo_label(minid, comm, thresh=ns.nmin) 

    N = halos.count(label, comm=comm)

    if comm.rank == 0:
        print 'total halos is', len(N)

    # Re-read columns one at a time to bound peak memory.
    [P] = read(comm, ns.filename, TPMSnapshotFile, columns=['Position'])

    hpos = halos.centerofmass(label, P['Position'], boxsize=1.0, comm=comm)

    [P] = read(comm, ns.filename, TPMSnapshotFile, columns=['Velocity'])

    # boxsize=None: velocities are not periodic, no wrapping when averaging.
    hvel = halos.centerofmass(label, P['Velocity'], boxsize=None, comm=comm)

    if comm.rank == 0:
        print N
        print 'total groups', N.shape
        print 'total particles', N.sum()
        print 'above ', ns.nmin, (N >ns.nmin).sum()
        # Label 0 collects the unbound/below-threshold particles; mark it.
        N[0] = -1
        # Catalog layout: int32 count, float32 ll, then N, positions,
        # velocities as raw little-endian arrays.
        with open(ns.output + '.halo', 'w') as ff:
            numpy.int32(len(N)).tofile(ff)
            numpy.float32(ns.LinkingLength).tofile(ff)
            numpy.int32(N).tofile(ff)
            numpy.float32(hpos).tofile(ff)
            numpy.float32(hvel).tofile(ff)
        print hpos
    del N
    del hpos

    # Rank 0 pre-creates one '.grp.NN' file per snapshot chunk with an
    # 8-byte header (count + linking length); labels are filled in below.
    npart = None
    if comm.rank == 0:
        snapshot = Snapshot(ns.filename,TPMSnapshotFile)
        npart = snapshot.npart
        for i in range(len(snapshot.npart)):
            with open(ns.output + '.grp.%02d' % i, 'w') as ff:
                numpy.int32(npart[i]).tofile(ff)
                numpy.float32(ns.LinkingLength).tofile(ff)
                pass
    npart = comm.bcast(npart)

    # This rank owns the global label range [start, end); write each
    # overlapping slice into the matching snapshot-chunk file.
    start = sum(comm.allgather(len(label))[:comm.rank])
    end = sum(comm.allgather(len(label))[:comm.rank+1])
    label = numpy.int32(label)
    written = 0
    for i in range(len(npart)):
        filestart = sum(npart[:i])
        fileend = sum(npart[:i+1])
        # Clip [start, end) to this file's particle range.
        mystart = start - filestart
        myend = end - filestart
        if myend <= 0 : continue
        if mystart >= npart[i] : continue
        if myend > npart[i]: myend = npart[i]
        if mystart < 0: mystart = 0
        with open(ns.output + '.grp.%02d' % i, 'r+') as ff:
            # Skip the 8-byte header, then seek to this rank's slot
            # (4 bytes per int32 label).
            ff.seek(8, 0)
            ff.seek(mystart * 4, 1)
            label[written:written + myend - mystart].tofile(ff)
        written += myend - mystart

    return
Example #14
0
File: fof.py  Project: DonRegan/nbodykit
def local_fof(pos, ll):
    """Friends-of-friends on positions in a unit periodic box; returns
    the per-particle group labels."""
    ds = cluster.dataset(pos, boxsize=1.0)
    groups = cluster.fof(ds, linking_length=ll, np=0, verbose=True)
    return groups.labels
Example #15
0
def subfof(pos, vel, ll, vfactor, haloid, Ntot, boxsize):
    """Phase-space friends-of-friends on one halo's particles.

    Variant of ``subfof`` for an arbitrary (possibly anisotropic)
    periodic ``boxsize``: positions are unwrapped around the first
    particle, velocities rescaled by their 1-d dispersion, and subgroups
    found in the joint (position, velocity) space.  The membership
    threshold starts at 80 and decays (x0.9) until at least one subgroup
    is found or the threshold drops to 1.  Rows are returned sorted by
    decreasing Length.

    Parameters
    ----------
    pos, vel : (N, 3) float arrays; both are modified in place.
    ll : linking length.
    vfactor : velocity scale factor, in units of sigma_1d per ll.
    haloid : id written into every output row.
    Ntot : total particle count in the simulation.
    boxsize : per-axis periodic box size (array with .prod()).
    """
    # Mean particle density, used to normalise the overdensity in so().
    nbar = Ntot / boxsize.prod()
    # Unwrap periodic images about the first particle (work in box units
    # so each axis wraps at its own length).
    first = pos[0].copy()
    pos -= first
    pos /= boxsize
    pos[pos > 0.5]  -= 1.0
    pos[pos < -0.5] += 1.0
    pos *= boxsize
    pos += first

    # Keep raw velocities: the output 'Velocity' is their per-group mean.
    oldvel = vel.copy()
    vmean = vel.mean(axis=0, dtype='f8')
    vel -= vmean
    sigma_1d = (vel ** 2).mean(dtype='f8') ** 0.5
    # After this scaling, vfactor * sigma_1d corresponds to one ll.
    vel /= (vfactor * sigma_1d)
    vel *= ll
    data = numpy.concatenate((pos, vel), axis=1)

    data = cluster.dataset(data)
    Nsub = 0
    thresh = 80
    fof = cluster.fof(data, linking_length=ll, np=0)

    while Nsub == 0 and thresh > 1:
        # reducing the threshold till we find something..
        Nsub = (fof.length > thresh).sum()
        thresh *= 0.9
    # if nothing is found then assume this FOF group is a fluke.

    output = numpy.empty(Nsub, dtype=[
        ('Position', ('f4', 3)),
        ('Velocity', ('f4', 3)),
        ('LinkingLength', 'f4'),
        ('R200', 'f4'),
        ('R500', 'f4'),
        ('R1200', 'f4'),
        ('R2400', 'f4'),
        ('R6000', 'f4'),
        ('Length', 'i4'),
        ('HaloID', 'i4'),
        ])

    # The [:Nsub] slicing assumes fof orders groups by decreasing length
    # -- matches kdcount's convention, verify against the library version.
    output['Position'][...] = fof.center()[:Nsub, :3]
    output['Length'][...] = fof.length[:Nsub]
    output['HaloID'][...] = haloid
    output['LinkingLength'][...] = ll

    for i in range(3):
        # Per-subgroup mean of the *unscaled* velocities.
        output['Velocity'][..., i] = fof.sum(oldvel[:, i])[:Nsub] / output['Length']

    del fof
    del data
    # Overdensity radii are measured in position space only.
    data = cluster.dataset(pos)

    for i in range(Nsub):
        center = output['Position'][i]
        # Start the R200 search from the full extent of the halo.
        rmax = (((pos - center) ** 2).sum(axis=-1) ** 0.5).max()
        r1 = rmax
        output['R200'][i] = so(center, data, r1, nbar, 200.)
        output['R500'][i] = so(center, data, r1, nbar, 500.)
        # Each denser threshold starts from half the previous radius.
        output['R1200'][i] = so(center, data, output['R200'][i] * 0.5, nbar, 1200.)
        output['R2400'][i] = so(center, data, output['R1200'][i] * 0.5, nbar, 2400.)
        output['R6000'][i] = so(center, data, output['R2400'][i] * 0.5, nbar, 6000.)
    # NOTE(review): sorting on the subarray field 'Position' relies on
    # numpy accepting subarray fields in 'order' -- verify with the numpy
    # version in use.
    output.sort(order=['Length', 'Position'])
    # (A leftover debug print of the whole structured array was removed
    # here; it dumped every subhalo row to stdout on each call.)
    output = output[::-1]
    return output