def find_pdf(snapshot, grid, MAS, do_RSD, axis, threads, ptype, fpdf,
             smoothing, Filter):

    if os.path.exists(fpdf): return 0

    # read header
    head = readgadget.header(snapshot)
    BoxSize = head.boxsize / 1e3  #Mpc/h
    Nall = head.nall  #Total number of particles
    Masses = head.massarr * 1e10  #Masses of the particles in Msun/h
    Omega_m = head.omega_m
    Omega_l = head.omega_l
    redshift = head.redshift
    Hubble = 100.0 * np.sqrt(Omega_m *
                             (1.0 + redshift)**3 + Omega_l)  #km/s/(Mpc/h)
    h = head.hubble

    # read snapshot
    pos = readgadget.read_block(snapshot, "POS ", ptype) / 1e3  #Mpc/h

    # move particles to redshift-space
    if do_RSD:
        vel = readgadget.read_block(snapshot, "VEL ", ptype)  #km/s
        RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, redshift, axis)

    # calculate the overdensity field
    delta = np.zeros((grid, grid, grid), dtype=np.float32)
    if len(ptype) > 1:  #for multiple particles read masses
        mass = np.zeros(pos.shape[0], dtype=np.float32)
        offset = 0
        for j in ptype:
            mass[offset:offset + Nall[j]] = Masses[j]
            offset += Nall[j]
        MASL.MA(pos, delta, BoxSize, MAS, W=mass)
    else:
        MASL.MA(pos, delta, BoxSize, MAS)
    delta /= np.mean(delta, dtype=np.float64)
    #delta -= 1.0

    # define the arrays containing the variance of the field and of its log
    var = np.zeros(smoothing.shape[0], dtype=np.float64)
    var_log = np.zeros(smoothing.shape[0], dtype=np.float64)

    # do a loop over the different smoothing scales
    for i, smooth_scale in enumerate(smoothing):

        # smooth the overdensity field
        W_k = SL.FT_filter(BoxSize, smooth_scale, grid, Filter, threads)
        delta_smoothed = SL.field_smoothing(delta, W_k, threads)

        # compute the variance of the field
        var[i] = np.var(delta_smoothed)

        indexes = np.where(delta_smoothed > 0.0)
        var_log[i] = np.var(np.log10(delta_smoothed[indexes]))

    # save results to file
    np.savetxt(fpdf, np.transpose([smoothing, var, var_log]), delimiter='\t')
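# A minimal usage sketch for the routine above (illustrative only): the snapshot
# path, grid size, smoothing radii and output name are hypothetical, and the
# Pylians-style modules it relies on (readgadget, MASL, SL, RSL) are assumed to
# be importable; the 'Top-Hat' filter name is likewise an assumption.
import numpy as np

snapshot  = '/path/to/snapdir_004/snap_004'  #hypothetical Gadget snapshot
smoothing = np.array([5.0, 10.0, 20.0])      #smoothing radii in Mpc/h
find_pdf(snapshot, grid=512, MAS='CIC', do_RSD=False, axis=0, threads=4,
         ptype=[1], fpdf='variance_m_z=0.dat', smoothing=smoothing,
         Filter='Top-Hat')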
Example No. 2
def Pk_comp(snapshot_fname,ptype,dims,do_RSD,axis,cpus,folder_out):

    # read relevant parameters from the header
    print 'Computing power spectrum...'
    head     = readsnap.snapshot_header(snapshot_fname)
    BoxSize  = head.boxsize/1e3 #Mpc/h
    Masses   = head.massarr*1e10 #Msun/h
    Nall     = head.nall;  Ntotal = np.sum(Nall,dtype=np.int64)
    Omega_m  = head.omega_m
    Omega_l  = head.omega_l
    redshift = head.redshift
    Hubble   = 100.0*np.sqrt(Omega_m*(1.0+redshift)**3+Omega_l)  #km/s/(Mpc/h)
    z        = '%.3f'%redshift
        
    # find output file name
    fout = folder_out+'/Pk_' + name_dict[str(ptype)]
    if do_RSD:  fout += ('_RS_axis=' + str(axis) + '_z=' + z + '.dat')
    else:       fout +=                           ('_z=' + z + '.dat')

    # read the positions of the particles
    pos = readsnap.read_block(snapshot_fname,"POS ",parttype=ptype)/1e3 #Mpc/h
    print '%.3f < X [Mpc/h] < %.3f'%(np.min(pos[:,0]),np.max(pos[:,0]))
    print '%.3f < Y [Mpc/h] < %.3f'%(np.min(pos[:,1]),np.max(pos[:,1]))
    print '%.3f < Z [Mpc/h] < %.3f\n'%(np.min(pos[:,2]),np.max(pos[:,2]))

    # read the velocities of the particles
    if do_RSD:
        print 'moving particles to redshift-space...'
        vel = readsnap.read_block(snapshot_fname,"VEL ",parttype=ptype) #km/s
        RSL.pos_redshift_space(pos,vel,BoxSize,Hubble,redshift,axis)
        del vel;  print 'done'

    # define delta array
    delta = np.zeros((dims,dims,dims),dtype=np.float32)

    # when dealing with all particles take into account their different masses
    if ptype==-1:
        if Nall[0]==0: #if not hydro
            M = np.zeros(Ntotal,dtype=np.float32) #define the mass array
            offset = 0
            for ptype in [0,1,2,3,4,5]:
                M[offset:offset+Nall[ptype]] = Masses[ptype]
                offset += Nall[ptype]
        else:
            M = readsnap.read_block(snapshot_fname,"MASS",parttype=-1)*1e10
        
        mean = np.sum(M,dtype=np.float64)/dims**3
        MASL.MA(pos,delta,BoxSize,'CIC',M); del pos,M

    else:  
        mean = len(pos)*1.0/dims**3
        MASL.MA(pos,delta,BoxSize,'CIC'); del pos

    # compute the P(k) and save results to file
    delta /= mean;  delta -= 1.0
    Pk = PKL.Pk(delta,BoxSize,axis=axis,MAS='CIC',threads=cpus);  del delta
    np.savetxt(fout,np.transpose([Pk.k3D, Pk.Pk[:,0], Pk.Pk[:,1], Pk.Pk[:,2],
                                  Pk.Nmodes3D]))
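# Every routine in this collection displaces particles with
# RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, redshift, axis), i.e. the
# plane-parallel redshift-space mapping s = x + v_los*(1+z)/H(z) along one axis,
# with periodic wrapping. A minimal numpy sketch of that mapping (an illustrative
# stand-in, not the library implementation) is:
import numpy as np

def pos_redshift_space_sketch(pos, vel, BoxSize, Hubble, redshift, axis):
    # displace comoving positions along the line of sight (Mpc/h)
    pos[:, axis] += vel[:, axis] * (1.0 + redshift) / Hubble
    # enforce periodic boundary conditions
    pos[:, axis] = np.mod(pos[:, axis], BoxSize)
    return pos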
Example No. 3
def find_Bk(snapshot, snapnum, Ngrid, Nmax, Ncut, step, do_RSD, axis, ptype,
            fbk):

    # read header
    head = readgadget.header(snapshot)
    BoxSize = head.boxsize / 1e3  #Mpc/h
    Omega_m = head.omega_m
    Omega_l = head.omega_l
    redshift = head.redshift
    Hubble = 100.0 * np.sqrt(Omega_m *
                             (1.0 + redshift)**3 + Omega_l)  #km/s/(Mpc/h)
    h = head.hubble

    # read the snapshot
    pos = readgadget.read_block(snapshot, "POS ", ptype) / 1e3  #Mpc/h

    # move positions to redshift-space
    if do_RSD:
        vel = readgadget.read_block(snapshot, "VEL ", ptype)  #km/s
        RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, redshift, axis)

    # calculate bispectrum
    b123out = pySpec.Bk_periodic(pos.T,
                                 Lbox=BoxSize,
                                 Ngrid=Ngrid,
                                 step=step,
                                 Ncut=Ncut,
                                 Nmax=Nmax,
                                 fft='pyfftw',
                                 nthreads=1,
                                 silent=False)

    i_k = b123out['i_k1']
    j_k = b123out['i_k2']
    l_k = b123out['i_k3']
    p0k1 = b123out['p0k1']
    p0k2 = b123out['p0k2']
    p0k3 = b123out['p0k3']
    b123 = b123out['b123']
    b_sn = b123out['b123_sn']
    q123 = b123out['q123']
    cnts = b123out['counts']

    hdr = ('matter bispectrum; k_f = 2pi/%.1f, Nhalo=%i' %
           (BoxSize, pos.shape[0]))
    np.savetxt(
        fbk,
        np.array([i_k, j_k, l_k, p0k1, p0k2, p0k3, b123, q123, b_sn, cnts]).T,
        fmt='%i %i %i %.5e %.5e %.5e %.5e %.5e %.5e %.5e',
        delimiter='\t',
        header=hdr)
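# Illustrative call to the matter-bispectrum routine above; the snapshot path,
# output name and the (Ngrid, Nmax, Ncut, step) binning choices are hypothetical
# values, and pySpec is assumed to be installed.
find_Bk(snapshot='/path/to/snapdir_002/snap_002', snapnum=2, Ngrid=360,
        Nmax=40, Ncut=3, step=3, do_RSD=True, axis=2, ptype=[1],
        fbk='Bk_m_RS_axis=2_z=0.5.dat')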
Example No. 4
def find_pdf(snapshot, grid, MAS, do_RSD, axis, threads, ptype, fpdf,
             smoothing, Filter):

    if os.path.exists(fpdf): return 0

    # read header
    head = readgadget.header(snapshot)
    BoxSize = head.boxsize / 1e3  #Mpc/h
    Nall = head.nall  #Total number of particles
    Masses = head.massarr * 1e10  #Masses of the particles in Msun/h
    Omega_m = head.omega_m
    Omega_l = head.omega_l
    redshift = head.redshift
    Hubble = 100.0 * np.sqrt(Omega_m *
                             (1.0 + redshift)**3 + Omega_l)  #km/s/(Mpc/h)
    h = head.hubble

    # read snapshot
    pos = readgadget.read_block(snapshot, "POS ", ptype) / 1e3  #Mpc/h

    # move particles to redshift-space
    if do_RSD:
        vel = readgadget.read_block(snapshot, "VEL ", ptype)  #km/s
        RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, redshift, axis)

    # calculate the overdensity field
    delta = np.zeros((grid, grid, grid), dtype=np.float32)
    if len(ptype) > 1:  #for multiple particles read masses
        mass = np.zeros(pos.shape[0], dtype=np.float32)
        offset = 0
        for j in ptype:
            mass[offset:offset + Nall[j]] = Masses[j]
            offset += Nall[j]
        MASL.MA(pos, delta, BoxSize, MAS, W=mass)
    else:
        MASL.MA(pos, delta, BoxSize, MAS)
    delta /= np.mean(delta, dtype=np.float64)
    #delta -= 1.0

    # smooth the overdensity field
    W_k = SL.FT_filter(BoxSize, smoothing, grid, Filter, threads)
    delta_smoothed = SL.field_smoothing(delta, W_k, threads)

    # histogram the smoothed field (rho/<rho>) in logarithmic bins
    bins = np.logspace(-2, 2, 100)
    pdf, bin_edges = np.histogram(delta_smoothed, bins=bins)
    mean = 0.5 * (bin_edges[1:] + bin_edges[:-1])  #bin centers
    pdf = pdf * 1.0 / grid**3  #fraction of cells in each bin

    # save results to file
    np.savetxt(fpdf, np.transpose([mean, pdf]), delimiter='\t')
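# Note that the second column saved above is the fraction of grid cells per
# logarithmic bin. A small optional post-processing sketch (not part of the
# original code; the file name is hypothetical) to turn it into a PDF per unit
# rho/<rho> by dividing by the bin widths:
import numpy as np

bins    = np.logspace(-2, 2, 100)
widths  = bins[1:] - bins[:-1]
centers, frac = np.loadtxt('pdf_m_z=0.dat', unpack=True)  #hypothetical output file
pdf_per_unit  = frac / widths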
Example No. 5
def find_CF(snapshot, snapnum, grid, MAS, do_RSD, axis, threads, ptype, fcf,
            save_multipoles):

    if os.path.exists(fcf): return 0

    # read header
    head = readgadget.header(snapshot)
    BoxSize = head.boxsize / 1e3  #Mpc/h
    Nall = head.nall  #Total number of particles
    Masses = head.massarr * 1e10  #Masses of the particles in Msun/h
    Omega_m = head.omega_m
    Omega_l = head.omega_l
    redshift = head.redshift
    Hubble = 100.0 * np.sqrt(Omega_m *
                             (1.0 + redshift)**3 + Omega_l)  #km/s/(Mpc/h)
    h = head.hubble

    # read snapshot
    pos = readgadget.read_block(snapshot, "POS ", ptype) / 1e3  #Mpc/h

    # move particles to redshift-space
    if do_RSD:
        vel = readgadget.read_block(snapshot, "VEL ", ptype)  #km/s
        RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, redshift, axis)

    # calculate CF
    delta = np.zeros((grid, grid, grid), dtype=np.float32)
    if len(ptype) > 1:  #for multiple particles read masses
        mass = np.zeros(pos.shape[0], dtype=np.float32)
        offset = 0
        for j in ptype:
            mass[offset:offset + Nall[j]] = Masses[j]
            offset += Nall[j]
        MASL.MA(pos, delta, BoxSize, MAS, W=mass)
    else:
        MASL.MA(pos, delta, BoxSize, MAS)
    delta /= np.mean(delta, dtype=np.float64)
    delta -= 1.0
    CF = PKL.Xi(delta, BoxSize, MAS, axis, threads)

    # save results to file
    if save_multipoles:
        np.savetxt(fcf,
                   np.transpose(
                       [CF.r3D, CF.xi[:, 0], CF.xi[:, 1], CF.xi[:, 2]]),
                   delimiter='\t')
    else:
        np.savetxt(fcf, np.transpose([CF.r3D, CF.xi[:, 0]]), delimiter='\t')
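# Illustrative call to the correlation-function routine above; the snapshot
# path, grid size and output name are hypothetical values.
find_CF(snapshot='/path/to/snapdir_004/snap_004', snapnum=4, grid=512,
        MAS='CIC', do_RSD=False, axis=0, threads=8, ptype=[1],
        fcf='CF_m_z=0.dat', save_multipoles=True)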
Example No. 6
def find_Pk(folder, snapdir, snapnum, grid, MAS, do_RSD, axis, threads,
            fixed_Mmin, Mmin, Nhalos, fpk, save_multipoles):

    if os.path.exists(fpk):  return 0
    
    # read header
    head     = readgadget.header(snapdir)
    BoxSize  = head.boxsize/1e3  #Mpc/h                      
    Omega_m  = head.omega_m
    Omega_l  = head.omega_l
    redshift = head.redshift
    Hubble   = 100.0*np.sqrt(Omega_m*(1.0+redshift)**3+Omega_l)#km/s/(Mpc/h)
    h        = head.hubble

    # read halo catalogue
    FoF   = readfof.FoF_catalog(folder, snapnum, long_ids=False,
                                swap=False, SFR=False, read_IDs=False)
    pos_h = FoF.GroupPos/1e3            #Mpc/h
    mass  = FoF.GroupMass*1e10          #Msun/h
    vel_h = FoF.GroupVel*(1.0+redshift) #km/s
    if fixed_Mmin:
        indexes = np.where(mass>Mmin)[0]
        pos_h = pos_h[indexes];  vel_h = vel_h[indexes];  del indexes
    else:
        indexes = np.argsort(mass)[-Nhalos:] #take the Nhalos most massive halos
        pos_h = pos_h[indexes];  vel_h = vel_h[indexes];  del indexes

    # move halos to redshift-space
    if do_RSD:  RSL.pos_redshift_space(pos_h, vel_h, BoxSize, Hubble, redshift, axis)

    # calculate Pk
    delta = np.zeros((grid,grid,grid), dtype=np.float32)
    MASL.MA(pos_h, delta, BoxSize, MAS)
    delta /= np.mean(delta, dtype=np.float64);  delta -= 1.0 
    Pk = PKL.Pk(delta, BoxSize, axis, MAS, threads)

    # save results to file
    hdr = ('Nhalos=%i BoxSize=%.3f'%(pos_h.shape[0],BoxSize))    
    if save_multipoles:
        np.savetxt(fpk, np.transpose([Pk.k3D, Pk.Pk[:,0], Pk.Pk[:,1], Pk.Pk[:,2]]),
                   delimiter='\t', header=hdr)
    else:
        np.savetxt(fpk, np.transpose([Pk.k3D, Pk.Pk[:,0]]),
                   delimiter='\t', header=hdr)
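# Illustrative call to the halo power-spectrum routine above, selecting halos
# either by a fixed mass cut or by number; the folder layout and numbers are
# hypothetical values.
find_Pk(folder='/path/to/FoF', snapdir='/path/to/snapdir_002/snap_002',
        snapnum=2, grid=512, MAS='CIC', do_RSD=True, axis=0, threads=8,
        fixed_Mmin=True, Mmin=3.2e13, Nhalos=None,
        fpk='Pk_halos_RS_axis=0_z=0.5.dat', save_multipoles=True)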
Example No. 7
def Illustris_region(snapshot_root,
                     snapnum,
                     TREECOOL_file,
                     x_min,
                     x_max,
                     y_min,
                     y_max,
                     z_min,
                     z_max,
                     padding,
                     fout,
                     redshift_space=False,
                     axis=0):

    # read snapshot and find number of subfiles
    snapshot = snapshot_root + 'snapdir_%03d/snap_%03d' % (snapnum, snapnum)
    header = rs.snapshot_header(snapshot)
    nall = header.nall
    redshift = header.redshift
    BoxSize = header.boxsize / 1e3  #Mpc/h
    filenum = header.filenum
    Omega_m = header.omega0
    Omega_L = header.omegaL
    h = header.hubble
    Hubble = 100.0 * np.sqrt(Omega_m *
                             (1.0 + redshift)**3 + Omega_L)  #km/s/(Mpc/h)

    if myrank == 0:
        print '\n'
        print 'BoxSize         = %.3f Mpc/h' % BoxSize
        print 'Number of files = %d' % filenum
        print 'Omega_m         = %.3f' % Omega_m
        print 'Omega_l         = %.3f' % Omega_L
        print 'redshift        = %.3f' % redshift

    # find the numbers each cpu will work on
    array = np.arange(0, filenum)
    numbers = np.where(array % nprocs == myrank)[0]

    # do a loop over the different realizations
    particles = 0
    for i in numbers:

        snapshot = snapshot_root + 'snapdir_%03d/snap_%03d.%d' % (snapnum,
                                                                  snapnum, i)
        pos = rs.read_block(snapshot, 'POS ', parttype=0, verbose=False) / 1e3
        pos = pos.astype(np.float32)

        # read velocities and displace particle positions
        if redshift_space:
            vel = rs.read_block(snapshot, 'VEL ', parttype=0,
                                verbose=False) / np.sqrt(1.0 + redshift)  #km/s
            RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, redshift, axis)

        # check if particles are in the region
        indexes_region = np.where((pos[:,0]>=x_min-padding) & (pos[:,0]<=x_max+padding) &\
                                  (pos[:,1]>=y_min-padding) & (pos[:,1]<=y_max+padding) &\
                                  (pos[:,2]>=z_min-padding) & (pos[:,2]<=z_max+padding))[0]

        # if no particles lie in the region, skip this subfile
        local_particles = indexes_region.shape[0]
        print 'Myrank = %d ---> num = %d ---> part = %ld' % (myrank, i,
                                                             local_particles)
        if local_particles == 0: continue

        # find radii, HI and gas masses
        MHI = rs.read_block(snapshot, 'NH  ', parttype=0, verbose=False)  #HI/H
        mass = rs.read_block(snapshot, 'MASS', parttype=0,
                             verbose=False) * 1e10
        SFR = rs.read_block(snapshot, 'SFR ', parttype=0, verbose=False)
        indexes = np.where(SFR > 0.0)[0]
        del SFR

        # find the metallicity of star-forming particles
        metals = rs.read_block(snapshot, 'GZ  ', parttype=0, verbose=False)
        metals = metals[indexes] / 0.0127

        # find densities of star-forming particles: units of h^2 Msun/Mpc^3
        rho = rs.read_block(snapshot, 'RHO ', parttype=0, verbose=False) * 1e19
        Volume = mass / rho  #(Mpc/h)^3
        radii = (Volume / (4.0 * np.pi / 3.0))**(1.0 / 3.0)  #Mpc/h
        rho = rho[indexes]  #h^2 Msun/Mpc^3
        Volume = Volume[indexes]  #(Mpc/h)^3

        # find volume and radius of star-forming particles
        radii_SFR = (Volume / (4.0 * np.pi / 3.0))**(1.0 / 3.0)  #Mpc/h

        # find HI/H fraction for star-forming particles
        MHI[indexes] = HIL.Rahmati_HI_Illustris(rho,
                                                radii_SFR,
                                                metals,
                                                redshift,
                                                h,
                                                TREECOOL_file,
                                                Gamma=None,
                                                fac=1,
                                                correct_H2=True)  #HI/H
        MHI *= (0.76 * mass)

        # select the particles belonging to the region
        pos = pos[indexes_region]
        MHI = MHI[indexes_region]
        radii = radii[indexes_region]
        mass = mass[indexes_region]

        # write partial files
        new_size = particles + local_particles

        if particles == 0:
            f = h5py.File(fout[:-5] + '_%d.hdf5' % myrank, 'w')
            f.create_dataset('pos', data=pos, maxshape=(None, 3))
            f.create_dataset('M_HI', data=MHI, maxshape=(None, ))
            f.create_dataset('radii', data=radii, maxshape=(None, ))
            f.create_dataset('mass', data=mass, maxshape=(None, ))
        else:
            f = h5py.File(fout[:-5] + '_%d.hdf5' % myrank, 'a')
            pos_f = f['pos']
            pos_f.resize((new_size, 3))
            M_HI_f = f['M_HI']
            M_HI_f.resize((new_size, ))
            radii_f = f['radii']
            radii_f.resize((new_size, ))
            mass_f = f['mass']
            mass_f.resize((new_size, ))
            pos_f[particles:] = pos
            M_HI_f[particles:] = MHI
            radii_f[particles:] = radii
            mass_f[particles:] = mass
        f.close()
        particles += local_particles

    comm.Barrier()

    # sum the particles found in each cpu
    All_particles = 0
    All_particles = comm.reduce(particles, op=MPI.SUM, root=0)

    # Master merges the partial files into a single file
    if myrank == 0:

        print 'Found %d particles' % All_particles
        f = h5py.File(fout, 'w')

        f1 = h5py.File(fout[:-5] + '_0.hdf5', 'r')
        pos = f1['pos'][:]
        M_HI = f1['M_HI'][:]
        radii = f1['radii'][:]
        mass = f1['mass'][:]
        f1.close()

        particles = mass.shape[0]
        pos_f = f.create_dataset('pos', data=pos, maxshape=(None, 3))
        M_HI_f = f.create_dataset('M_HI', data=M_HI, maxshape=(None, ))
        radii_f = f.create_dataset('radii', data=radii, maxshape=(None, ))
        mass_f = f.create_dataset('mass', data=mass, maxshape=(None, ))

        for i in xrange(1, nprocs):
            f1 = h5py.File(fout[:-5] + '_%d.hdf5' % i, 'r')
            pos = f1['pos'][:]
            M_HI = f1['M_HI'][:]
            radii = f1['radii'][:]
            mass = f1['mass'][:]
            f1.close()

            size = mass.shape[0]

            pos_f.resize((particles + size, 3))
            pos_f[particles:] = pos
            M_HI_f.resize((particles + size, ))
            M_HI_f[particles:] = M_HI
            radii_f.resize((particles + size, ))
            radii_f[particles:] = radii
            mass_f.resize((particles + size, ))
            mass_f[particles:] = mass

            particles += size

        f.close()

        for i in xrange(nprocs):
            os.system('rm ' + fout[:-5] + '_%d.hdf5' % i)
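# The routine above relies on module-level MPI variables (comm, myrank, nprocs)
# that are not shown in this excerpt. A minimal sketch of the setup it assumes,
# using mpi4py:
from mpi4py import MPI

comm   = MPI.COMM_WORLD
myrank = comm.Get_rank()  #rank of this process
nprocs = comm.Get_size()  #total number of processes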
Example No. 8
Hz = 100.0 * np.sqrt(Omega_m * (1.0 + z)**3 + Omega_l)

V_cell = BoxSize**3 * (5.0 / BoxSize) / dims**2  #(Mpc/h)^3

print '\nReading halo catalogue...'
snapshot_root = '%s/output/' % run
halos = groupcat.loadHalos(snapshot_root,
                           snapnum,
                           fields=['GroupPos', 'GroupMass', 'GroupVel'])
halo_pos = halos['GroupPos'] / 1e3  #Mpc/h
halo_vel = halos['GroupVel'] * (1.0 + z)  #km/s
halo_mass = halos['GroupMass'] * 1e10  #Msun/h
del halos

# move halo positions to redshift-space
RSL.pos_redshift_space(halo_pos, halo_vel, BoxSize, Hz, z, axis)

print np.min(halo_pos[:, 0]), np.max(halo_pos[:, 0])
print np.min(halo_pos[:, 1]), np.max(halo_pos[:, 1])
print np.min(halo_pos[:, 2]), np.max(halo_pos[:, 2])

indexes = np.where((halo_pos[:, 2] >= 0.0) & (halo_pos[:, 2] < 5.0))[0]

halo_pos = halo_pos[indexes]
halo_mass = halo_mass[indexes]
M_HI = M0 * (halo_mass / Mmin)**alpha * np.exp(-(Mmin / halo_mass)**0.35)

print np.min(halo_pos[:, 0]), np.max(halo_pos[:, 0])
print np.min(halo_pos[:, 1]), np.max(halo_pos[:, 1])
print np.min(halo_pos[:, 2]), np.max(halo_pos[:, 2])
Omega_m = head.omega_m
Omega_l = head.omega_l
redshift = head.redshift
Hubble = 100.0 * np.sqrt(Omega_m *
                         (1.0 + redshift)**3 + Omega_l)  #km/s/(Mpc/h)
h = head.hubble

# read particle positions and masses
pos = readsnap.read_block(snapshot_fname, "POS ", parttype=ptype) / 1e3  #Mpc/h
mass = readsnap.read_block(snapshot_fname, "MASS",
                           parttype=ptype) * 1e10  #Msun/h

# move particle positions to redshift-space
if do_RSD:
    vel = readsnap.read_block(snapshot_fname, "VEL ", parttype=ptype)  #km/s
    RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, redshift, axis)
    del vel

# some verbose
print '%.3f < X [Mpc/h] < %.3f' % (np.min(pos[:, 0]), np.max(pos[:, 0]))
print '%.3f < Y [Mpc/h] < %.3f' % (np.min(pos[:, 1]), np.max(pos[:, 1]))
print '%.3f < Z [Mpc/h] < %.3f' % (np.min(pos[:, 2]), np.max(pos[:, 2]))
print 'Omega_ptype = %.4f' % (np.sum(mass, dtype=np.float64) / BoxSize**3 /
                              rho_crit)
print 'Omega_m     =', Omega_m

# compute the mean mass per cell
mean_mass = np.sum(mass, dtype=np.float64) / dims**3

# compute the density contrast in each point of the grid
delta = np.zeros(dims**3, dtype=np.float32)
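# This excerpt stops right after allocating the grid. Based on the analogous
# compute_Pk routine later in this collection, the computation presumably
# continues by CIC-assigning the particle masses and forming the density
# contrast (a sketch, not part of the original excerpt):
CIC.CIC_serial(pos, dims, BoxSize, delta, mass);  del pos
delta /= mean_mass;  delta -= 1.0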
Example No. 10
            Hmass_b = Masses[Hmass_ind_b]
            #-------------------------------
            Hmass_c = Masses[Hmass_ind_c]
            #-------------------------------
            Hmass_d = Masses[Hmass_ind_d]

            #~ Hmass_a = (Hmass_1a + Hmass_2a)/2
            #~ Hmass_b = (Hmass_1b + Hmass_2b)/2
            #~ Hmass_c = (Hmass_1c + Hmass_2c)/2
            #~ Hmass_d = (Hmass_1d + Hmass_2d)/2

            ########################################################################
            ##### move particles to redshift-space
            ########################################################################

            RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, z, axe)

            #####################################################################
            ########## compute the halo mass function
            #####################################################################
            #~ print np.max(Masses1), np.min(Masses1), np.log10(np.max(Masses1))
            #~ bins = np.logspace(11,16,32)
            #~ N = len(Masses1)
            #~ weights = np.ones(N) * 1/float(N)/1e9
            #~ print 1/float(N)
            #~ cmf = MFL.Crocce_mass_function(kcamb,pcamb,Omega_b + Omega_c,z,1e11,1e16,32,Masses=None)
            #~ print np.shape(cmf)

            # halo mass function
            #~ plt.figure()
            #~ hist, binedge = np.histogram(Masses1, bins)
V_cell = BoxSize**3*(5.0/BoxSize)/dims**2 #(Mpc/h)^3



print '\nReading halo catalogue...'
snapshot_root = '%s/output/'%run
halos = groupcat.loadHalos(snapshot_root, snapnum, 
                           fields=['GroupPos','GroupMass','GroupVel'])
halo_pos  = halos['GroupPos']/1e3     #Mpc/h
halo_vel  = halos['GroupVel']*(1.0+z) #km/s
halo_mass = halos['GroupMass']*1e10   #Msun/h
del halos

# move halo positions to redshift-space                  
RSL.pos_redshift_space(halo_pos, halo_vel, BoxSize, Hz, z, axis)


print np.min(halo_pos[:,0]),np.max(halo_pos[:,0])
print np.min(halo_pos[:,1]),np.max(halo_pos[:,1])
print np.min(halo_pos[:,2]),np.max(halo_pos[:,2])


indexes = np.where((halo_pos[:,2]>=0.0) & (halo_pos[:,2]<5.0))[0]

halo_pos  = halo_pos[indexes]
halo_mass = halo_mass[indexes]
M_HI = M0*(halo_mass/Mmin)**alpha*np.exp(-(Mmin/halo_mass)**0.35)

print np.min(halo_pos[:,0]),np.max(halo_pos[:,0])
print np.min(halo_pos[:,1]),np.max(halo_pos[:,1])
Example No. 12
def compute_Pk(snapshot_fname,dims,do_RSD,axis,hydro):

    # read snapshot head and obtain BoxSize, Omega_m and Omega_L
    print '\nREADING SNAPSHOTS PROPERTIES'
    head     = readsnap.snapshot_header(snapshot_fname)
    BoxSize  = head.boxsize/1e3 #Mpc/h
    Nall     = head.nall
    Masses   = head.massarr*1e10 #Msun/h
    Omega_m  = head.omega_m
    Omega_l  = head.omega_l
    redshift = head.redshift
    Hubble   = 100.0*np.sqrt(Omega_m*(1.0+redshift)**3+Omega_l)  #h*km/s/Mpc
    h        = head.hubble

    z = '%.3f'%redshift
    f_out = 'Pk_m_z='+z+'.dat'

    # compute the values of Omega_CDM, Omega_B and Omega_NU
    Omega_cdm = Nall[1]*Masses[1]/BoxSize**3/rho_crit
    Omega_nu  = Nall[2]*Masses[2]/BoxSize**3/rho_crit
    Omega_b   = Omega_m-Omega_cdm-Omega_nu
    print '\nOmega_CDM = %.4f\nOmega_B   = %0.4f\nOmega_NU  = %.4f'\
        %(Omega_cdm,Omega_b,Omega_nu)
    print 'Omega_M   = %.4f'%(Omega_m)

    # read the positions of all the particles
    pos = readsnap.read_block(snapshot_fname,"POS ",parttype=-1)/1e3 #Mpc/h
    print '%.3f < X [Mpc/h] < %.3f'%(np.min(pos[:,0]),np.max(pos[:,0]))
    print '%.3f < Y [Mpc/h] < %.3f'%(np.min(pos[:,1]),np.max(pos[:,1]))
    print '%.3f < Z [Mpc/h] < %.3f\n'%(np.min(pos[:,2]),np.max(pos[:,2]))

    if do_RSD:
        print 'moving particles to redshift-space'
        # read the velocities of all the particles
        vel = readsnap.read_block(snapshot_fname,"VEL ",parttype=-1) #km/s
        RSL.pos_redshift_space(pos,vel,BoxSize,Hubble,redshift,axis);  del vel

    # read the masses of all the particles
    if not(hydro):
        Ntotal = np.sum(Nall,dtype=np.int64)   #compute the number of particles
        M = np.zeros(Ntotal,dtype=np.float32)  #define the mass array
        offset = 0
        for ptype in [0,1,2,3,4,5]:
            M[offset:offset+Nall[ptype]] = Masses[ptype];  offset += Nall[ptype]
    else:
        M = readsnap.read_block(snapshot_fname,"MASS",parttype=-1)*1e10 #Msun/h
    print '%.3e < M [Msun/h] < %.3e'%(np.min(M),np.max(M))
    print 'Omega_M = %.4f\n'%(np.sum(M,dtype=np.float64)/rho_crit/BoxSize**3)

    # compute the mean mass per grid cell
    mean_M = np.sum(M,dtype=np.float64)/dims**3

    # compute the mass within each grid cell
    delta = np.zeros(dims**3,dtype=np.float32)
    CIC.CIC_serial(pos,dims,BoxSize,delta,M); del pos
    print '%.6e should be equal to \n%.6e\n'\
        %(np.sum(M,dtype=np.float64),np.sum(delta,dtype=np.float64)); del M

    # compute the density contrast within each grid cell
    delta/=mean_M; delta-=1.0
    print '%.3e < delta < %.3e\n'%(np.min(delta),np.max(delta))

    # compute the P(k)
    Pk = PSL.power_spectrum_given_delta(delta,dims,BoxSize)

    # write P(k) to output file
    np.savetxt(f_out,np.transpose([Pk[0],Pk[1]]))
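# Illustrative call to compute_Pk above; the snapshot name and grid size are
# hypothetical, and the module-level rho_crit used inside is assumed to be defined.
compute_Pk('/path/to/snapdir_003/snap_003', dims=512, do_RSD=False, axis=0,
           hydro=False)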
Example No. 13
def density_field_gadget(snapshot_fname,
                         ptypes,
                         dims,
                         MAS='CIC',
                         do_RSD=False,
                         axis=0,
                         verbose=True):

    start = time.time()
    if verbose: print('\nComputing density field of particles', ptypes)

    # declare the array hosting the density field
    density = np.zeros((dims, dims, dims), dtype=np.float32)

    # read relevant parameters from the snapshot
    head = readgadget.header(snapshot_fname)
    BoxSize = head.boxsize / 1e3  #Mpc/h
    Masses = head.massarr * 1e10  #Msun/h
    Nall = head.nall
    Ntotal = np.sum(Nall, dtype=np.int64)
    filenum = head.filenum
    Omega_m = head.omega_m
    Omega_l = head.omega_l
    redshift = head.redshift
    fformat = head.format
    Hubble = head.Hubble

    if ptypes == [-1]: ptypes = [0, 1, 2, 3, 4, 5]
    if len(ptypes) == 1: single_component = True
    else: single_component = False

    # do a loop over all files
    num = 0.0
    for i in range(filenum):

        # find the name of the sub-snapshot
        if filenum == 1: snapshot = snapshot_fname
        else: snapshot = snapshot_fname + '.%d' % i
        if fformat == 'hdf5': snapshot = snapshot + '.hdf5'

        # find the local particles in the sub-snapshot
        head = readgadget.header(snapshot)
        npart = head.npart

        # do a loop over all particle types
        for ptype in ptypes:

            if npart[ptype] == 0: continue

            # read positions in Mpc/h
            pos = readgadget.read_field(snapshot, "POS ", ptype) / 1e3
            #pos = readsnap.read_block(snapshot,"POS ",parttype=ptype)/1e3

            # read velocities in km/s and move particles to redshift-space
            if do_RSD:
                vel = readgadget.read_field(snapshot, "VEL ", ptype)
                #vel = readsnap.read_block(snapshot,"VEL ",parttype=ptype)
                RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, redshift,
                                       axis)
                del vel

            # compute density field. If multicomponent, read/find masses
            if Masses[ptype] != 0:
                if single_component:
                    MASL.MA(pos, density, BoxSize, MAS)
                    num += pos.shape[0]
                else:
                    mass = np.ones(npart[ptype],
                                   dtype=np.float32) * Masses[ptype]
                    MASL.MA(pos, density, BoxSize, MAS, W=mass)
                    num += np.sum(mass, dtype=np.float64)
            else:
                mass = readgadget.read_field(snapshot, "MASS", ptype) * 1e10
                #mass = readsnap.read_block(snapshot,"MASS",
                #        parttype=ptype)*1e10 #Msun/h
                MASL.MA(pos, density, BoxSize, MAS, W=mass)
                num += np.sum(mass, dtype=np.float64)

    if verbose:
        print('%.8e should be equal to\n%.8e'\
            %(np.sum(density, dtype=np.float64), num))
        print('Time taken = %.2f seconds' % (time.time() - start))

    return np.asarray(density)
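# Illustrative call to density_field_gadget above; the snapshot name, grid size
# and particle selection are hypothetical values.
import numpy as np

density = density_field_gadget('/path/to/snapdir_003/snap_003', ptypes=[1],
                               dims=768, MAS='CIC', do_RSD=False, axis=0)
# turn the mass/number field into an overdensity, as the other routines do
density /= np.mean(density, dtype=np.float64);  density -= 1.0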
Example No. 14
def density_field_gadget(snapshot_fname, ptypes, dims, MAS='CIC',
                         do_RSD=False, axis=0, verbose=True):

    start = time.time()
    if verbose:  print '\nComputing density field of particles',ptypes

    # declare the array hosting the density field
    density = np.zeros((dims, dims, dims), dtype=np.float32)

    # read relevant parameters from the snapshot
    head     = readgadget.header(snapshot_fname)
    BoxSize  = head.boxsize/1e3  #Mpc/h
    Masses   = head.massarr*1e10 #Msun/h
    Nall     = head.nall;  Ntotal = np.sum(Nall,dtype=np.int64)
    filenum  = head.filenum
    Omega_m  = head.omega_m
    Omega_l  = head.omega_l
    redshift = head.redshift
    fformat  = head.format
    Hubble   = head.Hubble

    if ptypes==[-1]:    ptypes = [0, 1, 2, 3, 4, 5]
    if len(ptypes)==1:  single_component = True
    else:               single_component = False

    # do a loop over all files
    num = 0.0
    for i in xrange(filenum):

        # find the name of the sub-snapshot
        if filenum==1:       snapshot = snapshot_fname
        else:                snapshot = snapshot_fname+'.%d'%i
        if fformat=='hdf5':  snapshot = snapshot+'.hdf5'

        # find the local particles in the sub-snapshot
        head  = readgadget.header(snapshot)
        npart = head.npart

        # do a loop over all particle types
        for ptype in ptypes:

            if npart[ptype]==0:  continue

            # read positions in Mpc/h
            pos = readgadget.read_field(snapshot, "POS ", ptype)/1e3
            #pos = readsnap.read_block(snapshot,"POS ",parttype=ptype)/1e3

            # read velocities in km/s and move particles to redshift-space
            if do_RSD:
                vel = readgadget.read_field(snapshot, "VEL ", ptype)
                #vel = readsnap.read_block(snapshot,"VEL ",parttype=ptype)
                RSL.pos_redshift_space(pos,vel,BoxSize,Hubble,redshift,axis)
                del vel

            # compute density field. If multicomponent, read/find masses
            if Masses[ptype]!=0:
                if single_component:
                    MASL.MA(pos, density, BoxSize, MAS)
                    num += pos.shape[0]
                else:
                    mass = np.ones(npart[ptype], dtype=np.float32)*Masses[ptype]
                    MASL.MA(pos, density, BoxSize, MAS, W=mass)
                    num += np.sum(mass, dtype=np.float64)
            else:
                mass = readgadget.read_field(snapshot, "MASS", ptype)*1e10
                #mass = readsnap.read_block(snapshot,"MASS",
                #        parttype=ptype)*1e10 #Msun/h
                MASL.MA(pos, density, BoxSize, MAS, W=mass)
                num += np.sum(mass, dtype=np.float64)

    if verbose:
        print '%.8e should be equal to\n%.8e'\
            %(np.sum(density, dtype=np.float64), num)
        print 'Time taken = %.2f seconds'%(time.time()-start)

    return np.asarray(density)
Example No. 15
def find_Bk(folder, snapdir, snapnum, axis, Ngrid, step, Ncut, Nmax, do_RSD,
            fixed_Mmin, Mmin, Nhalos, fbk):

    # read header
    head = readgadget.header(snapdir)
    BoxSize = head.boxsize / 1e3  #Mpc/h
    Omega_m = head.omega_m
    Omega_l = head.omega_l
    redshift = head.redshift
    Hubble = 100.0 * np.sqrt(Omega_m *
                             (1.0 + redshift)**3 + Omega_l)  #km/s/(Mpc/h)
    h = head.hubble

    # read halo catalogue
    FoF = readfof.FoF_catalog(folder,
                              snapnum,
                              long_ids=False,
                              swap=False,
                              SFR=False,
                              read_IDs=False)
    pos_h = FoF.GroupPos / 1e3  #Mpc/h
    mass = FoF.GroupMass * 1e10  #Msun/h
    vel_h = FoF.GroupVel * (1.0 + redshift)  #km/s
    if fixed_Mmin:
        indexes = np.where(mass > Mmin)[0]
        pos_h = pos_h[indexes]
        vel_h = vel_h[indexes]
        del indexes
    else:
        indexes = np.argsort(mass)[
            -Nhalos:]  #take the Nhalos most massive halos
        pos_h = pos_h[indexes]
        vel_h = vel_h[indexes]
        del indexes

    # move halos to redshift-space
    if do_RSD:
        RSL.pos_redshift_space(pos_h, vel_h, BoxSize, Hubble, redshift, axis)

    # calculate bispectrum
    b123out = pySpec.Bk_periodic(pos_h.T,
                                 Lbox=BoxSize,
                                 Ngrid=Ngrid,
                                 step=step,
                                 Ncut=Ncut,
                                 Nmax=Nmax,
                                 fft='pyfftw',
                                 nthreads=1,
                                 silent=False)

    i_k = b123out['i_k1']
    j_k = b123out['i_k2']
    l_k = b123out['i_k3']
    p0k1 = b123out['p0k1']
    p0k2 = b123out['p0k2']
    p0k3 = b123out['p0k3']
    b123 = b123out['b123']
    b_sn = b123out['b123_sn']
    q123 = b123out['q123']
    cnts = b123out['counts']

    hdr = ('halo bispectrum; k_f = 2pi/%.1f, Nhalo=%i' %
           (BoxSize, pos_h.shape[0]))
    np.savetxt(
        fbk,
        np.array([i_k, j_k, l_k, p0k1, p0k2, p0k3, b123, q123, b_sn, cnts]).T,
        fmt='%i %i %i %.5e %.5e %.5e %.5e %.5e %.5e %.5e',
        delimiter='\t',
        header=hdr)
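# Illustrative call to the halo-bispectrum routine above; the paths, binning
# choices, halo selection and output name are hypothetical values.
find_Bk(folder='/path/to/FoF', snapdir='/path/to/snapdir_002/snap_002',
        snapnum=2, axis=0, Ngrid=360, step=3, Ncut=3, Nmax=40, do_RSD=True,
        fixed_Mmin=False, Mmin=None, Nhalos=150000,
        fbk='Bk_halos_z=0.5.dat')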
Example No. 16
    #define the delta array and the mean_mass variable
    delta     = np.zeros(dims3,dtype=np.float32)
    mean_mass = 0.0   #Msun/h

    #make a loop over all particle types and sum their masses in the grid
    for ptype in particle_type:

        #read particle positions 
        pos  = readsnap.read_block(snapshot_fname,"POS ",
                                   parttype=ptype)/1e3  #Mpc/h

        #displace particle positions to redshift-space
        if do_RSD:
            vel  = readsnap.read_block(snapshot_fname,"VEL ",
                                       parttype=ptype)  #km/s
            RSL.pos_redshift_space(pos,vel,BoxSize,Hubble,redshift,axis)
            del vel

        #read particle masses
        mass = readsnap.read_block(snapshot_fname,"MASS",
                                   parttype=ptype)*1e10 #Msun/h

        print 'Number of '+pname[ptype]+' particles =',len(pos)
        print '%.4f < X < %.4f'%(np.min(pos[:,0]), np.max(pos[:,0]))
        print '%.4f < Y < %.4f'%(np.min(pos[:,1]), np.max(pos[:,1]))
        print '%.4f < Z < %.4f'%(np.min(pos[:,2]), np.max(pos[:,2]))
        print '%.4e < Mass < %.4e'%(np.min(mass), np.max(mass))

        #compute the value of Omega
        print 'Omega_'+pname[ptype]+' = %.4f'\
            %(np.sum(mass,dtype=np.float64)/BoxSize**3/rho_crit)
Example No. 17
def Pk_Gadget(snapshot_fname,dims,particle_type,do_RSD,axis,cpus,
              folder_out=None):

    # find folder to place output files. Default is current directory
    if folder_out is None:  folder_out = os.getcwd()

    # for either one single species or all species use this routine
    if len(particle_type)==1:
        Pk_comp(snapshot_fname,particle_type[0],dims,do_RSD,
                axis,cpus,folder_out)
        return None

    # read snapshot head and obtain BoxSize, Omega_m and Omega_L
    print '\nREADING SNAPSHOTS PROPERTIES'
    head     = readsnap.snapshot_header(snapshot_fname)
    BoxSize  = head.boxsize/1e3  #Mpc/h
    Nall     = head.nall
    Masses   = head.massarr*1e10 #Msun/h
    Omega_m  = head.omega_m
    Omega_l  = head.omega_l
    redshift = head.redshift
    Hubble   = 100.0*np.sqrt(Omega_m*(1.0+redshift)**3+Omega_l)  #km/s/(Mpc/h)
    h        = head.hubble
    z        = '%.3f'%redshift
    dims3    = dims**3

    # compute the values of Omega_cdm, Omega_nu, Omega_gas and Omega_s
    Omega_c = Masses[1]*Nall[1]/BoxSize**3/rho_crit
    Omega_n = Masses[2]*Nall[2]/BoxSize**3/rho_crit
    Omega_g, Omega_s = 0.0, 0.0
    if Nall[0]>0:
        if Masses[0]>0:  
            Omega_g = Masses[0]*Nall[0]/BoxSize**3/rho_crit
            Omega_s = Masses[4]*Nall[4]/BoxSize**3/rho_crit
        else:    
            # mass in Msun/h
            mass = readsnap.read_block(snapshot_fname,"MASS",parttype=0)*1e10 
            Omega_g = np.sum(mass,dtype=np.float64)/BoxSize**3/rho_crit
            mass = readsnap.read_block(snapshot_fname,"MASS",parttype=4)*1e10
            Omega_s = np.sum(mass,dtype=np.float64)/BoxSize**3/rho_crit
            del mass

    # some verbose
    print 'Omega_gas    = ',Omega_g
    print 'Omega_cdm    = ',Omega_c
    print 'Omega_nu     = ',Omega_n
    print 'Omega_star   = ',Omega_s
    print 'Omega_m      = ',Omega_g + Omega_c + Omega_n + Omega_s
    print 'Omega_m snap = ',Omega_m

    # dictionary giving the value of Omega for each component
    Omega_dict = {0:Omega_g, 1:Omega_c, 2:Omega_n, 4:Omega_s}
    #####################################################################

    # define the array containing the deltas
    delta = [[],[],[],[]]  #array containing the gas, CDM, NU and stars deltas

    # dictionary among particle type and the index in the delta and Pk arrays
    # delta of stars (ptype=4) is delta[3] not delta[4]
    index_dict = {0:0, 1:1, 2:2, 4:3} 

    # define suffix here
    if do_RSD:  suffix = '_RS_axis=' + str(axis) + '_z=' + z + '.dat'
    else:       suffix =                           '_z=' + z + '.dat'
    #####################################################################

    # do a loop over all particle types and compute the deltas
    for ptype in particle_type:
    
        # read particle positions in #Mpc/h
        pos = readsnap.read_block(snapshot_fname,"POS ",parttype=ptype)/1e3 

        # move particle positions to redshift-space
        if do_RSD:
            vel = readsnap.read_block(snapshot_fname,"VEL ",parttype=ptype)#km/s
            RSL.pos_redshift_space(pos,vel,BoxSize,Hubble,redshift,axis)
            del vel

        # find the index of the particle type in the delta array
        index = index_dict[ptype]

        # compute mean number of particles per grid cell
        mean_number = len(pos)*1.0/dims3

        # compute the deltas
        delta[index] = np.zeros((dims,dims,dims),dtype=np.float32)
        MASL.MA(pos,delta[index],BoxSize,'CIC');  del pos
        delta[index] /= mean_number;  delta[index] -= 1.0
    #####################################################################

    #####################################################################
    # if there are two or more particles compute auto- and cross-power spectra
    for i,ptype1 in enumerate(particle_type):
        for ptype2 in particle_type[i+1:]:

            # find the indexes of the particle types
            index1 = index_dict[ptype1];  index2 = index_dict[ptype2]

            # choose the name of the output files
            fout1  = '/Pk_' + name_dict[str(ptype1)]             + suffix
            fout2  = '/Pk_' + name_dict[str(ptype2)]             + suffix
            fout12 = '/Pk_' + name_dict[str(ptype1)+str(ptype2)] + suffix
            fout1  = folder_out + fout1
            fout2  = folder_out + fout2
            fout12 = folder_out + fout12

            # some verbose
            print '\nComputing the auto- and cross-power spectra of types: '\
                ,ptype1,'-',ptype2
            print 'saving results in:';  print fout1,'\n',fout2,'\n',fout12

            # This routine computes the auto- and cross-power spectra
            data = PKL.XPk([delta[index1],delta[index2]],BoxSize,axis=axis,
                           MAS=['CIC','CIC'],threads=cpus)
                                                        
            k = data.k3D;   Nmodes = data.Nmodes3D

            # save power spectra results in the output files
            np.savetxt(fout12,np.transpose([k,
                                            data.XPk[:,0,0],
                                            data.XPk[:,1,0],
                                            data.XPk[:,2,0],
                                            Nmodes]))
            np.savetxt(fout1, np.transpose([k,
                                            data.Pk[:,0,0],
                                            data.Pk[:,1,0],
                                            data.Pk[:,2,0],
                                            Nmodes]))
            np.savetxt(fout2, np.transpose([k,
                                            data.Pk[:,0,1],
                                            data.Pk[:,1,1],
                                            data.Pk[:,2,1],
                                            Nmodes]))
    #####################################################################

    #####################################################################
    # compute the power spectrum of the sum of all components
    print '\ncomputing P(k) of all components'

    # define delta of all components
    delta_tot = np.zeros((dims,dims,dims),dtype=np.float32)

    Omega_tot = 0.0;  fout = folder_out + '/Pk_'
    for ptype in particle_type:
        index = index_dict[ptype]
        delta_tot += (Omega_dict[ptype]*delta[index])
        Omega_tot += Omega_dict[ptype]
        fout += name_dict[str(ptype)] + '+'

    delta_tot /= Omega_tot;  del delta;  fout = fout[:-1] #remove the trailing '+'
    
    # compute power spectrum
    data = PKL.Pk(delta_tot,BoxSize,axis=axis,MAS='CIC',
                  threads=cpus);  del delta_tot

    # write P(k) to output file
    np.savetxt(fout+suffix, np.transpose([data.k3D,
                                          data.Pk[:,0],
                                          data.Pk[:,1],
                                          data.Pk[:,2],
                                          data.Nmodes3D]))
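# Illustrative call to Pk_Gadget above, computing the auto- and cross-spectra of
# CDM and neutrinos; the snapshot name, grid size and output folder are
# hypothetical, and the module-level globals used inside (rho_crit, name_dict)
# are assumed to be defined.
Pk_Gadget('/path/to/snapdir_002/snap_002', dims=512, particle_type=[1, 2],
          do_RSD=False, axis=0, cpus=8, folder_out='Pk_results')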
Example No. 18
def compute_Pk(snapshot_fname, dims, do_RSD, axis, hydro):

    # read snapshot head and obtain BoxSize, Omega_m and Omega_L
    print '\nREADING SNAPSHOTS PROPERTIES'
    head = readsnap.snapshot_header(snapshot_fname)
    BoxSize = head.boxsize / 1e3  #Mpc/h
    Nall = head.nall
    Masses = head.massarr * 1e10  #Msun/h
    Omega_m = head.omega_m
    Omega_l = head.omega_l
    redshift = head.redshift
    Hubble = 100.0 * np.sqrt(Omega_m *
                             (1.0 + redshift)**3 + Omega_l)  #h*km/s/Mpc
    h = head.hubble

    z = '%.3f' % redshift
    f_out = 'Pk_m_z=' + z + '.dat'

    # compute the values of Omega_CDM, Omega_B and Omega_NU
    Omega_cdm = Nall[1] * Masses[1] / BoxSize**3 / rho_crit
    Omega_nu = Nall[2] * Masses[2] / BoxSize**3 / rho_crit
    Omega_b = Omega_m - Omega_cdm - Omega_nu
    print '\nOmega_CDM = %.4f\nOmega_B   = %0.4f\nOmega_NU  = %.4f'\
        %(Omega_cdm,Omega_b,Omega_nu)
    print 'Omega_M   = %.4f' % (Omega_m)

    # read the positions of all the particles
    pos = readsnap.read_block(snapshot_fname, "POS ",
                              parttype=-1) / 1e3  #Mpc/h
    print '%.3f < X [Mpc/h] < %.3f' % (np.min(pos[:, 0]), np.max(pos[:, 0]))
    print '%.3f < Y [Mpc/h] < %.3f' % (np.min(pos[:, 1]), np.max(pos[:, 1]))
    print '%.3f < Z [Mpc/h] < %.3f\n' % (np.min(pos[:, 2]), np.max(pos[:, 2]))

    if do_RSD:
        print 'moving particles to redshift-space'
        # read the velocities of all the particles
        vel = readsnap.read_block(snapshot_fname, "VEL ", parttype=-1)  #km/s
        RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, redshift, axis)
        del vel

    # read the masses of all the particles
    if not (hydro):
        Ntotal = np.sum(Nall, dtype=np.int64)  #compute the number of particles
        M = np.zeros(Ntotal, dtype=np.float32)  #define the mass array
        offset = 0
        for ptype in [0, 1, 2, 3, 4, 5]:
            M[offset:offset + Nall[ptype]] = Masses[ptype]
            offset += Nall[ptype]
    else:
        M = readsnap.read_block(snapshot_fname, "MASS",
                                parttype=-1) * 1e10  #Msun/h
    print '%.3e < M [Msun/h] < %.3e' % (np.min(M), np.max(M))
    print 'Omega_M = %.4f\n' % (np.sum(M, dtype=np.float64) / rho_crit /
                                BoxSize**3)

    # compute the mean mass per grid cell
    mean_M = np.sum(M, dtype=np.float64) / dims**3

    # compute the mass within each grid cell
    delta = np.zeros(dims**3, dtype=np.float32)
    CIC.CIC_serial(pos, dims, BoxSize, delta, M)
    del pos
    print '%.6e should be equal to \n%.6e\n'\
        %(np.sum(M,dtype=np.float64),np.sum(delta,dtype=np.float64))
    del M

    # compute the density contrast within each grid cell
    delta /= mean_M
    delta -= 1.0
    print '%.3e < delta < %.3e\n' % (np.min(delta), np.max(delta))

    # compute the P(k)
    Pk = PSL.power_spectrum_given_delta(delta, dims, BoxSize)

    # write P(k) to output file
    np.savetxt(f_out, np.transpose([Pk[0], Pk[1]]))
Example No. 19
def Illustris_region(snapshot_root, snapnum, TREECOOL_file, x_min, x_max, 
                     y_min, y_max, z_min, z_max, padding, fout,
                     redshift_space=False, axis=0):


    # read snapshot and find number of subfiles
    snapshot = snapshot_root + 'snapdir_%03d/snap_%03d'%(snapnum,snapnum)
    header   = rs.snapshot_header(snapshot)
    nall     = header.nall
    redshift = header.redshift
    BoxSize  = header.boxsize/1e3 #Mpc/h
    filenum  = header.filenum
    Omega_m  = header.omega0
    Omega_L  = header.omegaL
    h        = header.hubble
    Hubble = 100.0*np.sqrt(Omega_m*(1.0+redshift)**3+Omega_L) #km/s/(Mpc/h)

    if myrank==0:
        print '\n'
        print 'BoxSize         = %.3f Mpc/h'%BoxSize
        print 'Number of files = %d'%filenum
        print 'Omega_m         = %.3f'%Omega_m
        print 'Omega_l         = %.3f'%Omega_L
        print 'redshift        = %.3f'%redshift

    # find the numbers each cpu will work on
    array   = np.arange(0, filenum)
    numbers = np.where(array%nprocs==myrank)[0]

    # do a loop over the subfiles assigned to this rank
    particles = 0
    for i in numbers:

        snapshot = snapshot_root + 'snapdir_%03d/snap_%03d.%d'%(snapnum,snapnum,i)
        pos = rs.read_block(snapshot, 'POS ', parttype=0, verbose=False)/1e3
        pos = pos.astype(np.float32)

        # read velocities and displace particle positions
        if redshift_space:
            vel = rs.read_block(snapshot, 'VEL ', parttype=0, verbose=False)/np.sqrt(1.0+redshift) #km/s
            RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, redshift, axis)

        # check if particles are in the region
        indexes_region = np.where((pos[:,0]>=x_min-padding) & (pos[:,0]<=x_max+padding) &\
                                  (pos[:,1]>=y_min-padding) & (pos[:,1]<=y_max+padding) &\
                                  (pos[:,2]>=z_min-padding) & (pos[:,2]<=z_max+padding))[0]

        # if no particles lie in the region, skip this subfile
        local_particles = indexes_region.shape[0]
        print 'Myrank = %d ---> num = %d ---> part = %ld'%(myrank,i,local_particles)
        if local_particles==0:  continue

        # find radii, HI and gas masses
        MHI  = rs.read_block(snapshot, 'NH  ', parttype=0, verbose=False)#HI/H
        mass = rs.read_block(snapshot, 'MASS', parttype=0, verbose=False)*1e10
        SFR  = rs.read_block(snapshot, 'SFR ', parttype=0, verbose=False)
        indexes = np.where(SFR>0.0)[0];  del SFR

        # find the metallicity of star-forming particles
        metals = rs.read_block(snapshot, 'GZ  ', parttype=0, verbose=False)
        metals = metals[indexes]/0.0127

        # find densities of star-forming particles: units of h^2 Msun/Mpc^3
        rho    = rs.read_block(snapshot, 'RHO ', parttype=0, verbose=False)*1e19
        Volume = mass/rho                            #(Mpc/h)^3
        radii  = (Volume/(4.0*np.pi/3.0))**(1.0/3.0) #Mpc/h 
        rho    = rho[indexes]                        #h^2 Msun/Mpc^3
        Volume = Volume[indexes]                     #(Mpc/h)^3

        # find volume and radius of star-forming particles
        radii_SFR  = (Volume/(4.0*np.pi/3.0))**(1.0/3.0) #Mpc/h 
            
        # find HI/H fraction for star-forming particles
        MHI[indexes] = HIL.Rahmati_HI_Illustris(rho, radii_SFR, metals, redshift, 
                                                h, TREECOOL_file, Gamma=None,
                                                fac=1, correct_H2=True) #HI/H
        MHI *= (0.76*mass)
            

        # select the particles belonging to the region
        pos   = pos[indexes_region]
        MHI   = MHI[indexes_region]
        radii = radii[indexes_region]
        mass  = mass[indexes_region]

        # write partial files        
        new_size = particles + local_particles    

        if particles==0:
            f = h5py.File(fout[:-5]+'_%d.hdf5'%myrank, 'w')
            f.create_dataset('pos',   data=pos,   maxshape=(None,3))
            f.create_dataset('M_HI',  data=MHI,   maxshape=(None,))
            f.create_dataset('radii', data=radii, maxshape=(None,))
            f.create_dataset('mass',  data=mass,  maxshape=(None,))
        else:
            f = h5py.File(fout[:-5]+'_%d.hdf5'%myrank, 'a')
            pos_f   = f['pos'];    pos_f.resize((new_size,3))
            M_HI_f  = f['M_HI'];   M_HI_f.resize((new_size,))
            radii_f = f['radii'];  radii_f.resize((new_size,))
            mass_f  = f['mass'];   mass_f.resize((new_size,))
            pos_f[particles:]   = pos
            M_HI_f[particles:]  = MHI
            radii_f[particles:] = radii
            mass_f[particles:]  = mass
        f.close()
        particles += local_particles
                
    comm.Barrier()

    # sum the particles found in each cpu
    All_particles = 0 
    All_particles = comm.reduce(particles, op=MPI.SUM, root=0)

    # Master merges the partial files into a single file
    if myrank==0:

        print 'Found %d particles'%All_particles
        f = h5py.File(fout,'w')
        
        f1 = h5py.File(fout[:-5]+'_0.hdf5','r')
        pos   = f1['pos'][:]
        M_HI  = f1['M_HI'][:]
        radii = f1['radii'][:]
        mass  = f1['mass'][:]
        f1.close()

        particles = mass.shape[0]
        pos_f   = f.create_dataset('pos',   data=pos,   maxshape=(None,3))
        M_HI_f  = f.create_dataset('M_HI',  data=M_HI,  maxshape=(None,))
        radii_f = f.create_dataset('radii', data=radii, maxshape=(None,))
        mass_f  = f.create_dataset('mass',  data=mass,  maxshape=(None,))

        for i in xrange(1,nprocs):
            f1 = h5py.File(fout[:-5]+'_%d.hdf5'%i,'r')
            pos   = f1['pos'][:]
            M_HI  = f1['M_HI'][:]
            radii = f1['radii'][:]
            mass  = f1['mass'][:]
            f1.close()
            
            size = mass.shape[0]
            
            pos_f.resize((particles+size,3));  pos_f[particles:] = pos
            M_HI_f.resize((particles+size,));  M_HI_f[particles:] = M_HI
            radii_f.resize((particles+size,)); radii_f[particles:] = radii
            mass_f.resize((particles+size,));  mass_f[particles:] = mass

            particles += size

        f.close()

        for i in xrange(nprocs):
            os.system('rm '+fout[:-5]+'_%d.hdf5'%i)