# Imports assumed by these examples: the Pylians3 analysis libraries plus
# numpy/os. The pySpec import path for the bispectrum routine and the
# module-level names used below (name_dict, rho_crit, and the script-level
# variables in the loop fragments) are assumptions, not part of the excerpts.
import os
import numpy as np
import readgadget
import MAS_library as MASL
import Pk_library as PKL
import redshift_space_library as RSL
import smoothing_library as SL
from pyspectrum import pyspectrum as pySpec  # assumed import path


def compute_vf(snapshot, ptypes, grid, axis, MAS, fout):
    if not (os.path.exists(snapshot + '.0')): return 0

    # read header
    header = readgadget.header(snapshot)
    BoxSize = header.boxsize / 1e3  #Mpc/h
    Nall = header.nall  #Total number of particles
    Masses = header.massarr * 1e10  #Masses of the particles in Msun/h
    redshift = header.redshift  #redshift of the snapshot

    # read positions and velocities
    pos = readgadget.read_block(snapshot, "POS ", ptypes) / 1e3  #Mpc/h
    vel = readgadget.read_block(snapshot, "VEL ", ptypes)  #km/s

    # compute density field
    df = np.zeros((grid, grid, grid), dtype=np.float32)
    MASL.MA(pos, df, BoxSize, MAS)
    df[np.where(df == 0)] = 1e-7  # to avoid dividing by 0

    # compute the velocity field
    vf = np.zeros((grid, grid, grid), dtype=np.float32)
    MASL.MA(pos, vf, BoxSize, MAS, W=vel[:, axis])
    vf = vf / df

    # save results to file
    np.save(fout, vf)
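
# Minimal usage sketch for compute_vf (not part of the original script): the
# snapshot path, grid size and output name below are placeholders. ptypes=[1]
# selects the CDM particles and axis=0 the x-component of the velocity field.
snapshot = '/path/to/snapdir_004/snap_004'
compute_vf(snapshot, [1], 256, 0, 'CIC', 'vf_x_z=0.npy')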
Example #2
def Pk_comp(snapshot_fname,ptype,dims,do_RSD,axis,cpus,folder_out):

    # read relevant parameters from the header
    print('Computing power spectrum...')
    head     = readgadget.header(snapshot_fname)
    BoxSize  = head.boxsize/1e3 #Mpc/h
    Masses   = head.massarr*1e10 #Msun/h
    Nall     = head.nall;  Ntotal = np.sum(Nall,dtype=np.int64)
    Omega_m  = head.omega_m
    Omega_l  = head.omega_l
    redshift = head.redshift
    Hubble   = 100.0*np.sqrt(Omega_m*(1.0+redshift)**3+Omega_l)  #km/s/(Mpc/h)
    z        = '%.3f'%redshift
        
    # find output file name
    fout = folder_out+'/Pk_' + name_dict[str(ptype)]
    if do_RSD:  fout += ('_RS_axis=' + str(axis) + '_z=' + z + '.dat')
    else:       fout +=                           ('_z=' + z + '.dat')

    # read the positions of the particles
    pos = readgadget.read_block(snapshot_fname,"POS ",[ptype])/1e3 #Mpc/h
    print('%.3f < X [Mpc/h] < %.3f'%(np.min(pos[:,0]),np.max(pos[:,0])))
    print('%.3f < Y [Mpc/h] < %.3f'%(np.min(pos[:,1]),np.max(pos[:,1])))
    print('%.3f < Z [Mpc/h] < %.3f\n'%(np.min(pos[:,2]),np.max(pos[:,2])))

    # read the velocities of the particles
    if do_RSD:
        print('moving particles to redshift-space...')
        vel = readgadget.read_block(snapshot_fname,"VEL ",[ptype]) #km/s
        RSL.pos_redshift_space(pos,vel,BoxSize,Hubble,redshift,axis)
        del vel;  print('done')

    # define delta array
    delta = np.zeros((dims,dims,dims),dtype=np.float32)

    # when dealing with all particles take into account their different masses
    if ptype==-1:
        if Nall[0]==0: #if not hydro
            M = np.zeros(Ntotal,dtype=np.float32) #define the mass array
            offset = 0
            for ptype in [0,1,2,3,4,5]:
                M[offset:offset+Nall[ptype]] = Masses[ptype]
                offset += Nall[ptype]
        else:
            M = readgadget.read_block(snapshot_fname,"MASS",ptype=[-1])*1e10
        
        mean = np.sum(M,dtype=np.float64)/dims**3
        MASL.MA(pos,delta,BoxSize,'CIC',M); del pos,M

    else:  
        mean = len(pos)*1.0/dims**3
        MASL.MA(pos,delta,BoxSize,'CIC'); del pos

    # compute the P(k) and save results to file
    delta /= mean;  delta -= 1.0
    Pk = PKL.Pk(delta,BoxSize,axis=axis,MAS='CIC',threads=cpus);  del delta
    np.savetxt(fout,np.transpose([Pk.k3D, Pk.Pk[:,0], Pk.Pk[:,1], Pk.Pk[:,2],
                                  Pk.Nmodes3D]))
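
# Hypothetical call to Pk_comp (values are placeholders, not from the original
# script); name_dict is assumed to map '1' to a CDM label as in the module the
# excerpt comes from. This computes the real-space CDM P(k) on a 512^3 CIC
# grid with 4 threads.
Pk_comp('/path/to/snapdir_004/snap_004', 1, 512, False, 0, 4, './results')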
def find_pdf(snapshot, grid, MAS, do_RSD, axis, threads, ptype, fpdf,
             smoothing, Filter):

    if os.path.exists(fpdf): return 0

    # read header
    head = readgadget.header(snapshot)
    BoxSize = head.boxsize / 1e3  #Mpc/h
    Nall = head.nall  #Total number of particles
    Masses = head.massarr * 1e10  #Masses of the particles in Msun/h
    Omega_m = head.omega_m
    Omega_l = head.omega_l
    redshift = head.redshift
    Hubble = 100.0 * np.sqrt(Omega_m *
                             (1.0 + redshift)**3 + Omega_l)  #km/s/(Mpc/h)
    h = head.hubble

    # read snapshot
    pos = readgadget.read_block(snapshot, "POS ", ptype) / 1e3  #Mpc/h

    # move particles to redshift-space
    if do_RSD:
        vel = readgadget.read_block(snapshot, "VEL ", ptype)  #km/s
        RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, redshift, axis)

    # calculate the overdensity field
    delta = np.zeros((grid, grid, grid), dtype=np.float32)
    if len(ptype) > 1:  #for multiple particles read masses
        mass = np.zeros(pos.shape[0], dtype=np.float32)
        offset = 0
        for j in ptype:
            mass[offset:offset + Nall[j]] = Masses[j]
            offset += Nall[j]
        MASL.MA(pos, delta, BoxSize, MAS, W=mass)
    else:
        MASL.MA(pos, delta, BoxSize, MAS)
    delta /= np.mean(delta, dtype=np.float64)
    #delta -= 1.0

    # define the array containing the variance
    var = np.zeros(smoothing.shape[0], dtype=np.float64)
    var_log = np.zeros(smoothing.shape[0], dtype=np.float64)

    # do a loop over the different smoothing scales
    for i, smooth_scale in enumerate(smoothing):

        # smooth the overdensity field
        W_k = SL.FT_filter(BoxSize, smooth_scale, grid, Filter, threads)
        delta_smoothed = SL.field_smoothing(delta, W_k, threads)

        # compute the variance of the field
        var[i] = np.var(delta_smoothed)

        indexes = np.where(delta_smoothed > 0.0)
        var_log[i] = np.var(np.log10(delta_smoothed[indexes]))

    # save results to file
    np.savetxt(fpdf, np.transpose([smoothing, var, var_log]), delimiter='\t')
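
# Sketch of how this find_pdf variant might be called; paths and numbers are
# placeholders. The smoothing scales are in Mpc/h, and 'Top-Hat' is assumed to
# be one of the filters accepted by SL.FT_filter.
smoothing = np.array([5.0, 10.0, 15.0, 20.0])
find_pdf('/path/to/snapdir_004/snap_004', 512, 'CIC', False, 0, 4, [1],
         'variance_CDM_z=0.txt', smoothing, 'Top-Hat')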
def find_Bk(snapshot, snapnum, Ngrid, Nmax, Ncut, step, do_RSD, axis, ptype,
            fbk):

    # read header
    head = readgadget.header(snapshot)
    BoxSize = head.boxsize / 1e3  #Mpc/h
    Omega_m = head.omega_m
    Omega_l = head.omega_l
    redshift = head.redshift
    Hubble = 100.0 * np.sqrt(Omega_m *
                             (1.0 + redshift)**3 + Omega_l)  #km/s/(Mpc/h)
    h = head.hubble

    # read the snapshot
    pos = readgadget.read_block(snapshot, "POS ", ptype) / 1e3  #Mpc/h

    # move positions to redshift-space
    if do_RSD:
        vel = readgadget.read_block(snapshot, "VEL ", ptype)  #km/s
        RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, redshift, axis)

    # calculate bispectrum
    b123out = pySpec.Bk_periodic(pos.T,
                                 Lbox=BoxSize,
                                 Ngrid=Ngrid,
                                 step=step,
                                 Ncut=Ncut,
                                 Nmax=Nmax,
                                 fft='pyfftw',
                                 nthreads=1,
                                 silent=False)

    i_k = b123out['i_k1']
    j_k = b123out['i_k2']
    l_k = b123out['i_k3']
    p0k1 = b123out['p0k1']
    p0k2 = b123out['p0k2']
    p0k3 = b123out['p0k3']
    b123 = b123out['b123']
    b_sn = b123out['b123_sn']
    q123 = b123out['q123']
    cnts = b123out['counts']

    hdr = ('matter bispectrum; k_f = 2pi/%.1f, Nhalo=%i' %
           (BoxSize, pos.shape[0]))
    np.savetxt(
        fbk,
        np.array([i_k, j_k, l_k, p0k1, p0k2, p0k3, b123, q123, b_sn, cnts]).T,
        fmt='%i %i %i %.5e %.5e %.5e %.5e %.5e %.5e %.5e',
        delimiter='\t',
        header=hdr)
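
# Illustrative call to find_Bk; all values are placeholders. Ngrid, Nmax, Ncut
# and step are passed straight through to pySpec.Bk_periodic, so they follow
# that routine's triangle-binning conventions.
find_Bk('/path/to/snapdir_004/snap_004', 4, 360, 40, 3, 3, False, 0, [1],
        'Bk_CDM_z=0.txt')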
Example #5
def find_pdf(snapshot, grid, MAS, do_RSD, axis, threads, ptype, fpdf,
             smoothing, Filter):

    if os.path.exists(fpdf): return 0

    # read header
    head = readgadget.header(snapshot)
    BoxSize = head.boxsize / 1e3  #Mpc/h
    Nall = head.nall  #Total number of particles
    Masses = head.massarr * 1e10  #Masses of the particles in Msun/h
    Omega_m = head.omega_m
    Omega_l = head.omega_l
    redshift = head.redshift
    Hubble = 100.0 * np.sqrt(Omega_m *
                             (1.0 + redshift)**3 + Omega_l)  #km/s/(Mpc/h)
    h = head.hubble

    # read snapshot
    pos = readgadget.read_block(snapshot, "POS ", ptype) / 1e3  #Mpc/h

    # move particles to redshift-space
    if do_RSD:
        vel = readgadget.read_block(snapshot, "VEL ", ptype)  #km/s
        RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, redshift, axis)

    # calculate the overdensity field
    delta = np.zeros((grid, grid, grid), dtype=np.float32)
    if len(ptype) > 1:  #for multiple particles read masses
        mass = np.zeros(pos.shape[0], dtype=np.float32)
        offset = 0
        for j in ptype:
            mass[offset:offset + Nall[j]] = Masses[j]
            offset += Nall[j]
        MASL.MA(pos, delta, BoxSize, MAS, W=mass)
    else:
        MASL.MA(pos, delta, BoxSize, MAS)
    delta /= np.mean(delta, dtype=np.float64)
    #delta -= 1.0

    # smooth the overdensity field
    W_k = SL.FT_filter(BoxSize, smoothing, grid, Filter, threads)
    delta_smoothed = SL.field_smoothing(delta, W_k, threads)

    # histogram the smoothed field in logarithmic bins and normalize by the
    # total number of cells
    bins = np.logspace(-2, 2, 100)
    pdf, bin_edges = np.histogram(delta_smoothed, bins=bins)
    bin_centers = 0.5 * (bin_edges[1:] + bin_edges[:-1])
    pdf = pdf * 1.0 / grid**3

    # save results to file
    np.savetxt(fpdf, np.transpose([bin_centers, pdf]), delimiter='\t')
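
# Usage sketch for this second find_pdf variant, which takes a single
# smoothing scale (Mpc/h) instead of an array and histograms the smoothed
# density field. Paths and values are placeholders.
find_pdf('/path/to/snapdir_004/snap_004', 512, 'CIC', False, 0, 4, [1],
         'pdf_CDM_z=0.txt', 10.0, 'Gaussian')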
def find_CF(snapshot, snapnum, grid, MAS, do_RSD, axis, threads, ptype, fcf,
            save_multipoles):

    if os.path.exists(fcf): return 0

    # read header
    head = readgadget.header(snapshot)
    BoxSize = head.boxsize / 1e3  #Mpc/h
    Nall = head.nall  #Total number of particles
    Masses = head.massarr * 1e10  #Masses of the particles in Msun/h
    Omega_m = head.omega_m
    Omega_l = head.omega_l
    redshift = head.redshift
    Hubble = 100.0 * np.sqrt(Omega_m *
                             (1.0 + redshift)**3 + Omega_l)  #km/s/(Mpc/h)
    h = head.hubble

    # read snapshot
    pos = readgadget.read_block(snapshot, "POS ", ptype) / 1e3  #Mpc/h

    # move particles to redshift-space
    if do_RSD:
        vel = readgadget.read_block(snapshot, "VEL ", ptype)  #km/s
        RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, redshift, axis)

    # calculate CF
    delta = np.zeros((grid, grid, grid), dtype=np.float32)
    if len(ptype) > 1:  #for multiple particles read masses
        mass = np.zeros(pos.shape[0], dtype=np.float32)
        offset = 0
        for j in ptype:
            mass[offset:offset + Nall[j]] = Masses[j]
            offset += Nall[j]
        MASL.MA(pos, delta, BoxSize, MAS, W=mass)
    else:
        MASL.MA(pos, delta, BoxSize, MAS)
    delta /= np.mean(delta, dtype=np.float64)
    delta -= 1.0
    CF = PKL.Xi(delta, BoxSize, MAS, axis, threads)

    # save results to file
    if save_multipoles:
        np.savetxt(fcf,
                   np.transpose(
                       [CF.r3D, CF.xi[:, 0], CF.xi[:, 1], CF.xi[:, 2]]),
                   delimiter='\t')
    else:
        np.savetxt(fcf, np.transpose([CF.r3D, CF.xi[:, 0]]), delimiter='\t')
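
# Possible call to find_CF; placeholder paths and parameters. With
# save_multipoles=True the monopole, quadrupole and hexadecapole of the
# correlation function are written to the output file.
find_CF('/path/to/snapdir_004/snap_004', 4, 512, 'CIC', False, 0, 4, [1],
        'CF_CDM_z=0.txt', True)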
Example #7
def Pk_Gadget(snapshot_fname,
              dims,
              particle_type,
              do_RSD,
              axis,
              cpus,
              folder_out=None):

    # find folder to place output files. Default is current directory
    if folder_out is None: folder_out = os.getcwd()

    # for either one single species or all species use this routine
    if len(particle_type) == 1:
        Pk_comp(snapshot_fname, particle_type[0], dims, do_RSD, axis, cpus,
                folder_out)
        return None

    # read snapshot head and obtain BoxSize, Omega_m and Omega_L
    print('\nREADING SNAPSHOTS PROPERTIES')
    head = readgadget.header(snapshot_fname)
    BoxSize = head.boxsize / 1e3  #Mpc/h
    Nall = head.nall
    Masses = head.massarr * 1e10  #Msun/h
    Omega_m = head.omega_m
    Omega_l = head.omega_l
    redshift = head.redshift
    Hubble = 100.0 * np.sqrt(Omega_m *
                             (1.0 + redshift)**3 + Omega_l)  #km/s/(Mpc/h)
    h = head.hubble
    z = '%.3f' % redshift
    dims3 = dims**3

    # compute the values of Omega_cdm, Omega_nu, Omega_gas and Omega_s
    Omega_c = Masses[1] * Nall[1] / BoxSize**3 / rho_crit
    Omega_n = Masses[2] * Nall[2] / BoxSize**3 / rho_crit
    Omega_g, Omega_s = 0.0, 0.0
    if Nall[0] > 0:
        if Masses[0] > 0:
            Omega_g = Masses[0] * Nall[0] / BoxSize**3 / rho_crit
            Omega_s = Masses[4] * Nall[4] / BoxSize**3 / rho_crit
        else:
            # mass in Msun/h
            mass = readgadget.read_block(snapshot_fname, "MASS",
                                         ptype=[0]) * 1e10
            Omega_g = np.sum(mass, dtype=np.float64) / BoxSize**3 / rho_crit
            mass = readgadget.read_block(snapshot_fname, "MASS",
                                         ptype=[4]) * 1e10
            Omega_s = np.sum(mass, dtype=np.float64) / BoxSize**3 / rho_crit
            del mass

    # some verbose
    print('Omega_gas    = ', Omega_g)
    print('Omega_cdm    = ', Omega_c)
    print('Omega_nu     = ', Omega_n)
    print('Omega_star   = ', Omega_s)
    print('Omega_m      = ', Omega_g + Omega_c + Omega_n + Omega_s)
    print('Omega_m snap = ', Omega_m)

    # dictionary giving the value of Omega for each component
    Omega_dict = {0: Omega_g, 1: Omega_c, 2: Omega_n, 4: Omega_s}
    #####################################################################

    # define the array containing the deltas
    # list containing the gas, CDM, NU and stars deltas
    delta = [[], [], [], []]

    # dictionary among particle type and the index in the delta and Pk arrays
    # delta of stars (ptype=4) is delta[3] not delta[4]
    index_dict = {0: 0, 1: 1, 2: 2, 4: 3}

    # define suffix here
    if do_RSD: suffix = '_RS_axis=' + str(axis) + '_z=' + z + '.dat'
    else: suffix = '_z=' + z + '.dat'
    #####################################################################

    # do a loop over all particle types and compute the deltas
    for ptype in particle_type:

        # read particle positions in #Mpc/h
        pos = readgadget.read_block(snapshot_fname, "POS ", [ptype]) / 1e3

        # move particle positions to redshift-space
        if do_RSD:
            vel = readgadget.read_block(snapshot_fname, "VEL ", [ptype])  #km/s
            RSL.pos_redshift_space(pos, vel, BoxSize, Hubble, redshift, axis)
            del vel

        # find the index of the particle type in the delta array
        index = index_dict[ptype]

        # compute mean number of particles per grid cell
        mean_number = len(pos) * 1.0 / dims3

        # compute the deltas
        delta[index] = np.zeros((dims, dims, dims), dtype=np.float32)
        MASL.MA(pos, delta[index], BoxSize, 'CIC')
        del pos
        delta[index] /= mean_number
        delta[index] -= 1.0
    #####################################################################

    #####################################################################
    # if there are two or more particles compute auto- and cross-power spectra
    for i, ptype1 in enumerate(particle_type):
        for ptype2 in particle_type[i + 1:]:

            # find the indexes of the particle types
            index1 = index_dict[ptype1]
            index2 = index_dict[ptype2]

            # choose the name of the output files
            fout1 = '/Pk_' + name_dict[str(ptype1)] + suffix
            fout2 = '/Pk_' + name_dict[str(ptype2)] + suffix
            fout12 = '/Pk_' + name_dict[str(ptype1) + str(ptype2)] + suffix
            fout1 = folder_out + fout1
            fout2 = folder_out + fout2
            fout12 = folder_out + fout12

            # some verbose
            print('\nComputing the auto- and cross-power spectra of types: ',
                  ptype1, '-', ptype2)
            print('saving results in:')
            print(fout1, '\n', fout2, '\n', fout12)

            # This routine computes the auto- and cross-power spectra
            data = PKL.XPk([delta[index1], delta[index2]],
                           BoxSize,
                           axis=axis,
                           MAS=['CIC', 'CIC'],
                           threads=cpus)

            k = data.k3D
            Nmodes = data.Nmodes3D

            # save power spectra results in the output files
            np.savetxt(
                fout12,
                np.transpose([
                    k, data.XPk[:, 0, 0], data.XPk[:, 1, 0], data.XPk[:, 2, 0],
                    Nmodes
                ]))
            np.savetxt(
                fout1,
                np.transpose([
                    k, data.Pk[:, 0, 0], data.Pk[:, 1, 0], data.Pk[:, 2, 0],
                    Nmodes
                ]))
            np.savetxt(
                fout2,
                np.transpose([
                    k, data.Pk[:, 0, 1], data.Pk[:, 1, 1], data.Pk[:, 2, 1],
                    Nmodes
                ]))
    #####################################################################

    #####################################################################
    # compute the power spectrum of the sum of all components
    print('\ncomputing P(k) of all components')

    # define delta of all components
    delta_tot = np.zeros((dims, dims, dims), dtype=np.float32)

    Omega_tot = 0.0
    fout = folder_out + '/Pk_'
    for ptype in particle_type:
        index = index_dict[ptype]
        delta_tot += (Omega_dict[ptype] * delta[index])
        Omega_tot += Omega_dict[ptype]
        fout += name_dict[str(ptype)] + '+'

    delta_tot /= Omega_tot
    del delta
    fout = fout[:-1]  #remove the trailing '+'

    # compute power spectrum
    data = PKL.Pk(delta_tot, BoxSize, axis=axis, MAS='CIC', threads=cpus)
    del delta_tot

    # write P(k) to output file
    np.savetxt(
        fout + suffix,
        np.transpose([
            data.k3D, data.Pk[:, 0], data.Pk[:, 1], data.Pk[:, 2],
            data.Nmodes3D
        ]))
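
# Hypothetical driver call for Pk_Gadget on a hydro snapshot containing gas,
# CDM, neutrinos and stars: it computes every auto- and cross-spectrum plus
# the total-matter P(k). Path and grid size are placeholders; rho_crit and
# name_dict are assumed to be defined at module level.
Pk_Gadget('/path/to/snapdir_004/snap_004', 512, [0, 1, 2, 4], False, 0, 4,
          folder_out='./Pk_results')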
Example #8
    print(i)

    # if the output folder does not exist, create it. Get name of output file
    folder_out = '%s/%s/%d' % (root_out, model, i)
    if not (os.path.exists(folder_out)): os.system('mkdir %s' % folder_out)
    fout = '%s/displacement_field_z=%s.npy' % (folder_out, z)
    if os.path.exists(fout): continue

    # find the name of the ICs and snapshot
    ICs_snapshot = '%s/%s/%d/ICs/ics' % (root, model, i)
    snapshot = '%s/%s/%d/snapdir_%03d/snap_%03d' % (root, model, i, snapnum,
                                                    snapnum)

    # read the positions and IDs of the ICs
    pos_ICs = readgadget.read_block(ICs_snapshot, "POS ", ptype) / 1e3  #Mpc/h
    IDs_ICs = readgadget.read_block(ICs_snapshot, "ID  ",
                                    ptype) - 1  #IDs begin from 0

    # sort the ICs particles by IDs
    indexes = np.argsort(IDs_ICs)
    pos_ICs = pos_ICs[indexes]
    del IDs_ICs

    # read the positions and IDs of the z=0 snapshot
    pos = readgadget.read_block(snapshot, "POS ", ptype) / 1e3  #Mpc/h
    IDs = readgadget.read_block(snapshot, "ID  ",
                                ptype) - 1  #Make IDs begin from 0

    # sort the particles by IDs
    indexes = np.argsort(IDs)
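
    # The excerpt is cut off here; plausibly the z=0 positions would next be
    # sorted with this index array so that both position arrays share the same
    # particle order (the displacement field itself is formed as in Example #10
    # below). This line is a sketch, not part of the original script.
    pos = pos[indexes];  del IDs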
Example #9
            os.system('mkdir %s/%s/%d' % (root_out, cosmo, i))

        # find the snapshot name
        snapshot = '%s/%s/%d/snapdir_%03d/snap_%03d' % (root, cosmo, i,
                                                        snapnum, snapnum)

        # find name of output file
        fout = '%s/%s/%d/Pk_marked_z=%s.txt' % (root_out, cosmo, i, z)
        if os.path.exists(fout): continue

        # read header
        header = readgadget.header(snapshot)
        BoxSize = header.boxsize / 1e3  #Mpc/h

        # read positions, velocities and IDs of the particles
        pos = readgadget.read_block(snapshot, "POS ", ptype) / 1e3  #Mpc/h

        # compute the density field
        delta = np.zeros((grid, grid, grid), dtype=np.float32)
        MASL.MA(pos, delta, BoxSize, MAS)
        delta /= np.mean(delta, dtype=np.float64)
        delta -= 1.0

        # smooth the density field (check that values are still > -1)
        delta_smoothed = SL.field_smoothing(delta, W_k, threads)

        # find the value of the smoothed density field on top of each particle
        weight = np.zeros(pos.shape[0], dtype=np.float32)
        MASL.CIC_interp(delta_smoothed, BoxSize, pos, weight)

        # compute the density field weighing each particle by its weight
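
        # One plausible way to finish this truncated step (a sketch, not part
        # of the original script): paint the particles again, weighting each
        # one by the smoothed density interpolated at its position, and measure
        # P(k) of that field. The mark actually used by the original
        # marked-P(k) script may differ.
        delta_m = np.zeros((grid, grid, grid), dtype=np.float32)
        MASL.MA(pos, delta_m, BoxSize, MAS, W=weight)
        delta_m /= np.mean(delta_m, dtype=np.float64);  delta_m -= 1.0
        Pk = PKL.Pk(delta_m, BoxSize, axis=0, MAS=MAS, threads=threads)
        np.savetxt(fout, np.transpose([Pk.k3D, Pk.Pk[:, 0]]))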
Example #10
# do a loop over the different realizations
for i in numbers:

    print(i)

    # find the name of the output file
    fout = '%s/LR/displacement_%d_z=0.npy' % (root_out, i)
    if os.path.exists(fout): continue

    # find the name of the snapshots
    snap1 = '%s/%s/%d/ICs/ics' % (root, cosmo, i)
    snap2 = '%s/%s/%d/snapdir_%03d/snap_%03d' % (root, cosmo, i, snapnum,
                                                 snapnum)

    # read the positions and IDs of the ICs. Sort positions by particle IDs
    pos1 = readgadget.read_block(snap1, "POS ",
                                 ptype) / 1e3  #positions in Mpc/h
    IDs1 = readgadget.read_block(snap1, "ID  ", ptype) - 1  #normalized
    indexes = np.argsort(IDs1)
    pos1 = pos1[indexes]

    # read the positions and IDs of the z=0 snap. Sort positions by particle IDs
    pos2 = readgadget.read_block(snap2, "POS ",
                                 ptype) / 1e3  #positions in Mpc/h
    IDs2 = readgadget.read_block(snap2, "ID  ", ptype) - 1  #normalized
    indexes = np.argsort(IDs2)
    pos2 = pos2[indexes]

    # compute displacement field and care about boundary conditions
    disp = pos2 - pos1
    indexes = np.where(disp > middle)
    disp[indexes] -= BoxSize
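
    # The excerpt stops here; the symmetric wrap for the other boundary and
    # the save would plausibly follow (middle is assumed to equal BoxSize/2.0).
    # This continuation is a sketch, not part of the original script.
    indexes = np.where(disp < -middle)
    disp[indexes] += BoxSize
    np.save(fout, disp)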