Example No. 1
def load_stars(snapnum, snapfile):
    """
    Parameters
    ----------
    snapnum : int
        snapshot number of simulation
    snapfile : str
        path to snapshot directory
    
    Returns
    -------
    star : dict
        Dictionary with stellar positions, velocities, masses, and magnitudes.
    """
    s = read_hdf5.snapshot(snapnum, snapfile)
    s.read([
        "Coordinates", "Masses", "Velocities", "GFM_StellarFormationTime",
        "GFM_StellarPhotometrics"
    ],
           parttype=[4])
    age = (s.data['GFM_StellarFormationTime']['stars']).astype('float64')
    star_pos = s.data['Coordinates']['stars'][age >= 0, :]
    star_vel = s.data['Velocities']['stars'][age >= 0, :]
    star_mass = s.data['Masses']['stars'][age >= 0]  #[Msol]
    star_mag = s.data['GFM_StellarPhotometrics']['stars'][age >= 0]  # [Bol. Mag]

    star = {
        'Pos': star_pos,
        'Vel': star_vel,
        'Mass': star_mass,
        'Mag': star_mag,
    }
    return star
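A minimal usage sketch (the snapshot number and directory below are placeholders; it assumes read_hdf5 is importable and the snapshot provides TNG-style GFM fields):

star = load_stars(99, '/path/to/simulation/output/')
print('Number of stellar particles: %d' % star['Mass'].shape[0])
print('Total stellar mass: %g Msol' % star['Mass'].sum())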
Example No. 2
    def load_snapshot(self, snapnum, hfdir):
        """
        Load data of subhalos and particles
        """
        self.snapshot = read_hdf5_eagle.snapshot(snapnum, self.part_file)  # particle data path, assumed to be set on the class

        # Subhalos
        if self.halofinder == 'Rockstar':
            df = pd.read_csv(args["rksdir"] + 'halos_%d.dat' % snapnum,  # args: command-line dict, assumed defined at module level
                             sep=r'\s+',
                             skiprows=np.arange(1, 16))
            df = df.sort_values(by=['#ID'])
            df = df.set_index('#ID')
            subhalo = df.loc[[HFID]]  # HFID: halo-finder ID of the target halo, assumed defined elsewhere
            HPos = subhalo[['X', 'Y', 'Z']].values[0]
            Vrms = subhalo['Vrms'].values[0]
            M200 = subhalo['Mvir'].values[0]
            hvel = subhalo[['VX', 'VY', 'VZ']].values[0]
            epva = subhalo[['A[x]', 'A[y]', 'A[z]']].values[0]
            epvb = subhalo[['B[x]', 'B[y]', 'B[z]']].values[0]
            epvc = subhalo[['C[x]', 'C[y]', 'C[z]']].values[0]

        elif self.halofinder == 'Subfind':
            s = read_hdf5.snapshot(snapnum, hfdir)
            s.group_catalog([
                "SubhaloIDMostbound", "SubhaloPos", "SubhaloVel",
                "SubhaloMass", "SubhaloVelDisp", "SubhaloHalfmassRadType",
                "SubhaloLenType"
            ])
            df = pd.DataFrame({
                'HF_ID': s.cat['SubhaloIDMostbound'],
                'Vrms': s.cat['SubhaloVelDisp'],
                'Mass': s.cat['SubhaloMass'],
                'Npart': s.cat["SubhaloLenType"][:, 4]
            })
            subhalo_offset = (np.cumsum(df['Npart'].values) -
                              df['Npart'].values).astype(int)
            df['offset'] = pd.Series(subhalo_offset,
                                     index=df.index,
                                     dtype=int)
            s1 = pd.Series(dict(
                list(
                    enumerate(s.cat['SubhaloPos'] *
                              self.snapshot.header.hubble * 1e-3))),
                           index=df.index)
            df['Pos'] = s1
            s1 = pd.Series(dict(list(enumerate(s.cat['SubhaloVel']))),
                           index=df.index)
            df['Vel'] = s1
            df = df.sort_values(by=['HF_ID'])
            df = df.set_index('HF_ID')

            indx = self.df.index.intersection(df.index)
            self.df["M200"]
Example No. 3
def load_dm(snapnum, snapfile):
    s = read_hdf5.snapshot(snapnum, snapfile)
    s.read([
        "Coordinates",
        "Masses",
    ], parttype=[1])
    dm_pos = s.data['Coordinates']['dm']
    dm_mass = s.data['Masses']['dm']  #[Msol]

    dm = {'Pos': dm_pos, 'Mass': dm_mass}
    return dm
Example No. 4
    def __init__(self, h5_dir: str, snapnum: int):
        """
        Args:
            h5_dir: directory containing TNG data
            snapnum: snapshot number to read
        """

        self.snapnum = snapnum

        self.snapshot = read_hdf5.snapshot(snapnum, h5_dir)

        # Useful definitions
        self.dm = 1
        self.stars = 4
        self.dm_particle_mass = self.snapshot.header.massarr[self.dm] * 1.0e10
        self.output_dir = '/cosma6/data/dp004/dc-cues1/tng_dataframes/'
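For reference, a short sketch of the unit convention behind the factor 1.0e10 above (Gadget/Arepo headers store particle masses in units of 1e10 Msun/h), written with the attribute names used in the constructor; whether to divide by the Hubble parameter, as some of the other examples do, depends on the units wanted downstream:

h = self.snapshot.header.hubble
dm_particle_mass_h = self.snapshot.header.massarr[self.dm] * 1.0e10  # [Msun/h]
dm_particle_mass = dm_particle_mass_h / h                            # [Msun], if h-free units are wanted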
Example No. 5
def load_gas(snapnum, snapfile):
    """
    Parameters
    ----------
    snapnum : int
        snapshot number of simulation
    snapfile : str
        path to snapshot directory
    
    Returns
    -------
    gas : dict
        Dictionary with gas particle positions and masses.
    """
    s = read_hdf5.snapshot(snapnum, snapfile)
    s.read([
        "Coordinates",
        "Masses",
    ], parttype=[0])

    gas = {
        'Pos': s.data['Coordinates']['gas'],
        'Mass': s.data['Masses']['gas'],
    }
    return gas
Example No. 6
def load_subhalos(snapnum, snapfile, lafile, strong_lensing=1):
    """
    Parameters
    ----------
    snapnum : int
        snapshot number of simulation
    snapfile : str
        path to snapshot directory
    lafile : str
        path to lensing analysis results
    strong_lensing : bool
        if set, output only subhalos acting as strong lenses;
        otherwise output only subhalos that are not lenses

    Returns
    -------
    df : pd.DataFrame
    """
    # load snapshot data
    s = read_hdf5.snapshot(snapnum, snapfile)
    s.group_catalog([
        "SubhaloIDMostbound", "SubhaloPos", "SubhaloVel", "SubhaloMass",
        "SubhaloVelDisp", "SubhaloHalfmassRadType", "SubhaloLenType",
        "GroupLenType", "GroupNsubs", "GroupFirstSub"
    ])
    df = pd.DataFrame({
        'HF_ID': s.cat['SubhaloIDMostbound'],
        'Vrms': s.cat['SubhaloVelDisp'],
        'Mass': s.cat['SubhaloMass'],
        'Rstellarhalfmass': s.cat['SubhaloHalfmassRadType'][:, 4],
        'sNpart': s.cat["SubhaloLenType"][:, 4].astype('int'),
        'dmNpart': s.cat["SubhaloLenType"][:, 1].astype('int')
    })
    s1 = pd.Series(dict(list(enumerate(s.cat['SubhaloPos']))), index=df.index)
    df['Pos'] = s1
    s1 = pd.Series(dict(list(enumerate(s.cat['SubhaloVel']))), index=df.index)
    df['Vel'] = s1
    df['Index'] = df.index.values
    df = df.sort_values(by=['HF_ID'])
    df = df.set_index('HF_ID')

    if strong_lensing == True:
        LA = pickle.load(open(lafile, "rb"))  #, encoding='latin1')
        print('Processing the following file: \n %s' % (lafile))
        print('which contains %d lenses' % len(LA['HF_ID'][:]))
        print('with max. einst. radius: %f' % np.max(LA['Sources']['Rein'][:]))

        # Output only subhalos acting as gravitational strong lenses
        # Find intersection
        ladf = pd.DataFrame({
            'HF_ID': LA["HF_ID"],
            'ZS': LA["zs"],
            'FOV': LA["FOV"],
            'Rein': LA["Sources"]["Rein"],
        })
        ladf['Nimg'] = pd.Series(0, index=ladf.index)
        ladf['theta'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
        ladf['delta_t'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
        ladf['mu'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
        ladf['TCC'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
        ladf['DMAP'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
        for ll in range(len(ladf.index.values)):
            ladf['Nimg'][ll] = len(LA['Sources']['mu'][ll])
            ladf['theta'][ll] = LA['Sources']['theta'][ll]
            ladf['delta_t'][ll] = LA['Sources']['delta_t'][ll]
            ladf['mu'][ll] = LA['Sources']['mu'][ll]
            ladf['TCC'][ll] = LA['Sources']['TCC'][ll]
            ladf['DMAP'][ll] = LA['DMAP'][ll]

        ladf = ladf.sort_values(by=['HF_ID'])
        ladf = ladf.set_index('HF_ID')  # may contain duplicates
        #df = df[df.index.isin(ladf.index.values)]
        # Attach df to ladf
        ladf = ladf.join(df, how='inner')

        ladf['ZL'] = pd.Series(s.header.redshift, index=ladf.index)
        #df['Rein'] = ladf['Rein']
        #df['ZS'] = ladf['ZS']
        #df['Nimg'] = ladf['Nimg']
        #df['FOV'] = ladf['FOV']
        #df['theta'] = ladf['theta']
        #df['delta_t'] = ladf['delta_t']
        #df['mu'] = ladf['mu']
        #df['DMAP'] = ladf['DMAP']

        # Find bound particles for lenses
        BPF = BoundParticleFinder(s)
        subhalo_particles = BPF.find_bound_subhalo_particles(
            ladf['Index'].values, 4)
        ladf['BPF'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
        for ll in range(len(ladf.index.values)):
            ladf['BPF'][ll] = subhalo_particles[ll].astype(int)

        # Initialize
        ladf['Mlens'] = pd.Series(0, index=ladf.index)
    elif strong_lensing == False:
        ladf = pd.read_hdf(lafile, key='nonlenses')
        ladf = ladf.sort_values(by=['HF_ID'])
        ladf = ladf.set_index('HF_ID')
        print('Processing the following file: \n %s' % (lafile))
        print('which contains %d subhalos' % len(ladf.index.values))

        # Output only subhalos do not act as gravitational strong lenses
        # Attach df to ladf
        ladf = ladf.rename(columns={'zl': 'ZL'})
        ladf = ladf.rename(columns={'zs': 'ZS'})
        ladf = ladf.join(df, how='inner')

    # Initialize
    ladf['Mdyn'] = pd.Series(0, index=ladf.index)
    ladf['Mtotal'] = pd.Series(0, index=ladf.index)
    ladf['Vrms_Rein'] = pd.Series(0, index=ladf.index)
    ladf['Vrms_Rhm'] = pd.Series(0, index=ladf.index)
    ladf['PA'] = pd.Series(0.0, dtype=float, index=ladf.index)
    ladf['Ellip2D'] = pd.Series(0.0, dtype=float, index=ladf.index)
    ladf['Eccen2D'] = pd.Series(0.0, dtype=float, index=ladf.index)
    ladf['Ellip3D'] = pd.Series(0.0, dtype=float, index=ladf.index)
    ladf['Eccen3D'] = pd.Series(0.0, dtype=float, index=ladf.index)
    ladf['Prolat3D'] = pd.Series(0.0, dtype=float, index=ladf.index)
    ladf['VelProfRad'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
    ladf['VelProfMeas'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
    ladf['VrmsProfRad'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
    ladf['VrmsProfMeas'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
    ladf['SDensProfRad'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
    ladf['SDensProfMeas'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
    ladf['SMProfRad'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
    ladf['SMProfMeas'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
    ladf['SCVProfRad'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
    ladf['SCVProfMeas'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
    ladf['DMDensProfRad'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
    ladf['DMDensProfMeas'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
    ladf['GDensProfRad'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
    ladf['GDensProfMeas'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
    ladf['power_law_index'] = pd.Series(0, index=ladf.index).astype(np.ndarray)
    ladf['power_law_profile'] = pd.Series(0,
                                          index=ladf.index).astype(np.ndarray)
    #df['DMMProfRad'] = pd.Series(0, index=df.index).astype(np.ndarray)
    #df['DMMProfMeas'] = pd.Series(0, index=df.index).astype(np.ndarray)
    #df['DMCVProfRad'] = pd.Series(0, index=df.index).astype(np.ndarray)
    #df['DMCVProfMeas'] = pd.Series(0, index=df.index).astype(np.ndarray)
    return ladf
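A minimal usage sketch (paths and snapshot number are placeholders; as handled above, the lensing-analysis file is expected to be a pickle for the lens case and an HDF5 file for the non-lens case):

lenses = load_subhalos(45, '/path/to/simulation/',
                       '/path/to/lensing_analysis.pickle',
                       strong_lensing=1)
print('%d lens subhalos loaded' % len(lenses.index))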
Example No. 7
def create_density_maps():
    # Get command line arguments
    args = {}
    if comm_rank == 0:
        print(':Registered %d processes' % comm_size)
        args["simdir"]       = sys.argv[1]
        args["hfdir"]        = sys.argv[2]
        args["lcdir"]        = sys.argv[3]
        args["ncells"]       = int(sys.argv[4])
        args["outbase"]      = sys.argv[5]
    args = comm.bcast(args)
    label = args["simdir"].split('/')[-2].split('_')[2]
   
    # Organize division of sub-&halos over processes on proc. 0
    if comm_rank == 0:
        # Characteristics
        hflabel = whichhalofinder(args["lcdir"])

        # Load LightCone Contents
        lchdf = h5py.File(args["lcdir"], 'r')
        lcdfhalo = pd.DataFrame(
                {'HF_ID' : lchdf['Halo_Rockstar_ID'].value,
                 'ID' : lchdf['Halo_ID'].value,
                 'Halo_z' : lchdf['Halo_z'].value,
                 'snapnum' : lchdf['snapnum'].value,
                 #'Vrms' : lchdf['VelDisp'].value,
                 #'fov_Mpc' : lchdf['FOV'][:, 1],
                 ('HaloPosBox', 'X') : lchdf['HaloPosBox'][:, 0],
                 ('HaloPosBox', 'Y') : lchdf['HaloPosBox'][:, 1],
                 ('HaloPosBox', 'Z') : lchdf['HaloPosBox'][:, 2],})

        nhalo_per_snapshot = lcdfhalo.groupby('snapnum').count()['HF_ID']

        print('Number of Sub-&Halos in Snapshot:')
        print(nhalo_per_snapshot.values)
        print(np.sum(nhalo_per_snapshot.values))
        print(nhalo_per_snapshot.index.values)
        print(nhalo_per_snapshot.values[0])

        if nhalo_per_snapshot.values[0] > comm_size:
            hist_edges =  procdiv.histedges_equalN(lcdfhalo[('HaloPosBox', 'X')],
                                                   comm_size)
            SH = procdiv.cluster_subhalos(lcdfhalo['ID'].values,
                                          #lcdfhalo['Vrms'],
                                          #lcdfhalo['fov_Mpc'],
                                          lcdfhalo[('HaloPosBox', 'X')].values,
                                          lcdfhalo[('HaloPosBox', 'X')].values,
                                          lcdfhalo[('HaloPosBox', 'Y')].values,
                                          lcdfhalo[('HaloPosBox', 'Z')].values,
                                          hist_edges, comm_size)
            print('dict test', SH.keys())
        elif nhalo_per_snapshot.values[0] < comm_size:
            pass

        # Define Cosmology
        cosmo = LambdaCDM(H0=s.header.hubble*100,
                          Om0=s.header.omega_m,
                          Ode0=s.header.omega_l)
        cosmosim = {'omega_M_0' : s.header.omega_m,
                    'omega_lambda_0' : s.header.omega_l,
                    'omega_k_0' : 0.0,
                    'h' : s.header.hubble}
        redshift = s.header.redshift
        print(': Redshift: %f' % redshift)

        # Sort Sub-&Halos over Processes
        df = pd.read_csv(args["hfdir"]+'halos_%d.dat' % args["snapnum"],
                         sep='\s+', skiprows=16,
                         usecols=[0, 2, 4, 9, 10, 11],
                         names=['ID', 'Mvir', 'Vrms', 'X', 'Y', 'Z'])
        df = df[df['Mvir'] > 5e11]
        sh_id = df['ID'].values.astype('float64')
        sh_vrms = df['Vrms'].values.astype('float64')
        sh_x = df['X'].values.astype('float64')
        sh_y = df['Y'].values.astype('float64')
        sh_z = df['Z'].values.astype('float64')
        del df
        hist_edges =  procdiv.histedges_equalN(sh_x, comm_size)
        SH = cluster_subhalos(sh_id, sh_vrms, sh_x, sh_y, sh_z, hist_edges, comm_size)
      
        # Load simulation
        s = read_hdf5.snapshot(45, args["simdir"])
        s.read(["Coordinates", "Masses", "GFM_StellarFormationTime"],
               parttype=[0, 1, 4])
        scale = 1e-3*s.header.hubble
        
        # Calculate overlap for particle cuboids
        c = (const.c).to_value('km/s')
        fov_rad = 4*np.pi*(np.percentile(sh_vrms, 90)/c)**2
        sh_dist = (cosmo.comoving_distance(redshift)).to_value('Mpc')
        alpha = 6  # multiplied by 4 because of Oguri&Marshall
        overlap = 0.5*alpha*fov_rad*sh_dist  #[Mpc] half of field-of-view
        print('Cuboids overlap is: %f [Mpc]' % overlap)

        # Sort Particles over Processes
        ## Dark Matter
        dm_mass = (s.data['Masses']['dm']).astype('float64')
        dm_x = (s.data['Coordinates']['dm'][:, 0]*scale).astype('float64')
        dm_y = (s.data['Coordinates']['dm'][:, 1]*scale).astype('float64')
        dm_z = (s.data['Coordinates']['dm'][:, 2]*scale).astype('float64')
        dm_mass, dm_x, dm_y, dm_z, dm_split_size_1d, dm_split_disp_1d = cluster_particles(
                dm_mass, dm_x, dm_y, dm_z, hist_edges, comm_size)
        ## Gas
        gas_mass = (s.data['Masses']['gas']).astype('float64')
        gas_x = (s.data['Coordinates']['gas'][:, 0]*scale).astype('float64')
        gas_y = (s.data['Coordinates']['gas'][:, 1]*scale).astype('float64')
        gas_z = (s.data['Coordinates']['gas'][:, 2]*scale).astype('float64')
        gas_mass, gas_x, gas_y, gas_z, gas_split_size_1d, gas_split_disp_1d = cluster_particles(gas_mass, gas_x, gas_y, gas_z, hist_edges, comm_size)
        ## Stars
        star_mass = (s.data['Masses']['stars']).astype('float64')
        star_x = (s.data['Coordinates']['stars'][:, 0]*scale).astype('float64')
        star_y = (s.data['Coordinates']['stars'][:, 1]*scale).astype('float64')
        star_z = (s.data['Coordinates']['stars'][:, 2]*scale).astype('float64')
        star_age = s.data['GFM_StellarFormationTime']['stars']
        star_x = star_x[star_age >= 0]  #[Mpc]
        star_y = star_y[star_age >= 0]  #[Mpc]
        star_z = star_z[star_age >= 0]  #[Mpc]
        star_mass = star_mass[star_age >= 0]
        del star_age
        star_mass, star_x, star_y, star_z, star_split_size_1d, star_split_disp_1d = cluster_particles(star_mass, star_x, star_y, star_z, hist_edges, comm_size)

    else:
        c=None; alpha=None; overlap=None
        cosmosim=None; cosmo=None; redshift=None; hist_edges=None;
        SH=None;
        dm_mass=None; dm_x=None; dm_y=None; dm_z=None
        dm_split_size_1d=None; dm_split_disp_1d=None
        gas_mass=None; gas_x=None; gas_y=None; gas_z=None
        gas_split_size_1d=None; gas_split_disp_1d=None
        star_mass=None; star_x=None; star_y=None; star_z=None
        star_split_size_1d=None; star_split_disp_1d=None
      
    # Broadcast variables over all processors
    sh_split_size_1d = comm.bcast(SH['sh_split_size_1d'], root=0)
    sh_split_disp_1d = comm.bcast(SH['sh_split_disp_1d'], root=0)
    dm_split_size_1d = comm.bcast(dm_split_size_1d, root=0)
    dm_split_disp_1d = comm.bcast(dm_split_disp_1d, root=0)
    gas_split_size_1d = comm.bcast(gas_split_size_1d, root=0)
    gas_split_disp_1d = comm.bcast(gas_split_disp_1d, root=0)
    star_split_size_1d = comm.bcast(star_split_size_1d, root=0)
    star_split_disp_1d = comm.bcast(star_split_disp_1d, root=0)
    c = comm.bcast(c, root=0)
    alpha = comm.bcast(alpha, root=0)
    overlap = comm.bcast(overlap, root=0)
    cosmo = comm.bcast(cosmo, root=0)
    redshift = comm.bcast(redshift, root=0)
    hist_edges = comm.bcast(hist_edges, root=0)

    # Initialize variables for each processor
    sh_id_local = np.zeros((int(sh_split_size_1d[comm_rank])))
    sh_vrms_local = np.zeros((int(sh_split_size_1d[comm_rank])))
    sh_x_local = np.zeros((int(sh_split_size_1d[comm_rank])))
    sh_y_local = np.zeros((int(sh_split_size_1d[comm_rank])))
    sh_z_local = np.zeros((int(sh_split_size_1d[comm_rank])))
    dm_mass_local = np.zeros((int(dm_split_size_1d[comm_rank])))
    dm_x_local = np.zeros((int(dm_split_size_1d[comm_rank])))
    dm_y_local = np.zeros((int(dm_split_size_1d[comm_rank])))
    dm_z_local = np.zeros((int(dm_split_size_1d[comm_rank])))
    gas_mass_local = np.zeros((int(gas_split_size_1d[comm_rank])))
    gas_x_local = np.zeros((int(gas_split_size_1d[comm_rank])))
    gas_y_local = np.zeros((int(gas_split_size_1d[comm_rank])))
    gas_z_local = np.zeros((int(gas_split_size_1d[comm_rank])))
    star_mass_local = np.zeros((int(star_split_size_1d[comm_rank])))
    star_x_local = np.zeros((int(star_split_size_1d[comm_rank])))
    star_y_local = np.zeros((int(star_split_size_1d[comm_rank])))
    star_z_local = np.zeros((int(star_split_size_1d[comm_rank])))
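The local buffers above are typically filled with a Scatterv over the broadcast split sizes and displacements; a minimal sketch, assuming comm is the mpi4py communicator used above and that on non-root ranks the send arrays are simply None (they are ignored there):

from mpi4py import MPI

comm.Scatterv([dm_mass, dm_split_size_1d, dm_split_disp_1d, MPI.DOUBLE],
              dm_mass_local, root=0)
comm.Scatterv([dm_x, dm_split_size_1d, dm_split_disp_1d, MPI.DOUBLE],
              dm_x_local, root=0)
# ...repeated in the same way for dm_y/dm_z and for the gas, star, and subhalo arrays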
Example No. 8
    def __init__(
        self,
        overdensity,
        h5_dir="/cosma7/data/TNG/TNG300-1-Dark/",
        snapshot_number=99
        ):
        """
        Load data of particles that belong to resolved halos
        Args:
            overdensity: int
                Out to which radius halo properties are measured.
                R200 - default SubFind radii
                R2500 - where galaxy formation takes place
            h5_dir: str
                path to particle data
            snapshot_number: int
        """
        self.snapshot = read_hdf5.snapshot(
            snapshot_number, h5_dir, check_total_particle_number=True
        )
        
        # read simulation settings
        self.h = self.snapshot.header.hubble

        # read subfind & fof data
        self.snapshot.group_catalog(
            [
                "Group_M_Crit200",
                "Group_R_Crit200",
                "GroupLenType",
                "GroupFirstSub",
                "GroupPos",
                "GroupVel",
                "SubhaloVmax",
            ]
        )
        # associate particles to subfind & fof objects
        self.N_particles = self.snapshot.cat['GroupLenType'][:,1].astype(np.int64)
        self.firstsub = (self.snapshot.cat['GroupFirstSub']).astype(np.int64)
        self.cum_N_particles = np.cumsum(self.N_particles) - self.N_particles
        self.ID_DMO = np.arange(0, len(self.N_particles))
        # get only resolved halos
        self.halo_mass_thresh = 5.0e10 
        self.halo_mass_cut = (
            self.snapshot.cat["Group_M_Crit200"][:] * self.snapshot.header.hubble
            > self.halo_mass_thresh
        )
        # filter subfind & fof objects
        self.N_particles = self.N_particles[self.halo_mass_cut]
        self.firstsub = self.firstsub[self.halo_mass_cut]
        self.cum_N_particles = self.cum_N_particles[self.halo_mass_cut]
        self.ID_DMO = self.ID_DMO[self.halo_mass_cut]
        self.halo_pos = self.snapshot.cat['GroupPos'][self.halo_mass_cut] 
        self.halo_vel = self.snapshot.cat['GroupVel'][self.halo_mass_cut] 
        self.r200c = self.snapshot.cat['Group_R_Crit200'][self.halo_mass_cut]
        self.m200c = self.snapshot.cat['Group_M_Crit200'][self.halo_mass_cut] * \
            self.h
        self.vmax = self.snapshot.cat['SubhaloVmax'][self.firstsub]
        self.N_halos = len(self.vmax)

        # read particle data
        self.snapshot.read(["Coordinates", "Velocities"], parttype=[1])
        self.coordinates = self.snapshot.data['Coordinates']['dm'][:]
        self.velocities = self.snapshot.data['Velocities']['dm'][:]
        
        # dm particle mass
        self.Mpart = (
            self.snapshot.header.massarr[1] * 1e10 / self.h
        )  #[Msun] particle mass
       
        # if halo properties are to be measured at a radius other than r200c
        if overdensity != 200:
            self.overdensity = overdensity
            self.find_overdensity_radius()
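The cumulative offsets computed above let each resolved halo be mapped onto a slice of the particle arrays; a short sketch for a single halo, written with the attribute names used in the constructor (it assumes, as the offsets imply, that the particle files store particles grouped by FoF halo):

i = 0  # index of a resolved halo
start = self.cum_N_particles[i]
end = start + self.N_particles[i]
halo_dm_pos = self.coordinates[start:end]   # DM positions of halo i
halo_dm_vel = self.velocities[start:end]    # DM velocities of halo i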
Example No. 9
    def __init__(self, simulation, snapnum, nepoch):
        """ 
        """
        self.simulation = simulation

        if (self.simulation == "christian_dm"):
            self.sh_orig_file = "/cosma5/data/dp004/dc-oles1/dhalo/data/GR/"
            self.sh_reor_file = "/cosma5/data/dp004/dc-beck3/Galaxy_Evolution/" \
                                "SubFind/dm_only/L62_N512_GR/subfind.0.hdf5"
            self.mt_file = "/cosma5/data/dp004/dc-oles1/dhalo/out/trees/GR/" \
                           "treedir_075/tree_075.0.hdf5"
            self.snapshot = read_hdf5.snapshot(snapnum, self.sh_orig_file)
            print(':Loading snapshot at z=%f' % self.snapshot.header.redshift)
        elif (self.simulation == 'christian_fp'):
            self.sh_orig_file = "/cosma6/data/dp004/dc-arno1/SZ_project/" \
                                "full_physics/L62_N512_GR_kpc/"
            self.sh_reor_file = "/cosma5/data/dp004/dc-beck3/Galaxy_Evolution/" \
                                "SubFind/fp/L62_N512_GR/subfind.0.hdf5"
            self.mt_file = "/cosma/home/dp004/dc-oles1/var/data/dhalo/out/" \
                           "trees/full_physics_GR/treedir_045/tree_045.0.hdf5"
            self.snapshot = read_hdf5.snapshot(snapnum, self.sh_orig_file)
            print(':Loading snapshot at z=%f' % self.snapshot.header.redshift)
        elif (self.simulation == "EAGLE"):
            self.sh_file = "/gpfs/data/Eagle/yanTestRuns/MergerTree/" \
                           "Dec14/L0100N1504/EAGLE_L0100N1504_db.hdf5"
            self.part_file = "/cosma5/data/Eagle/ScienceRuns/Planck1/" \
                             "L0100N1504/PE/REFERENCE/data/"
            self.mt_file = "/gpfs/data/Eagle/yanTestRuns/MergerTree/" \
                           "Dec14/L0100N1504/EAGLE_L0100N1504_db.hdf5"
            self.snapshot = read_hdf5_eagle.snapshot(snapnum, self.part_file)
            print(':Loading snapshot at z=%f' % self.snapshot.header.redshift)

        # Load the useful fields
        if (self.simulation == 'christian_dm'):
            # First Hand Data
            ## Snapshot particle related data
            self.snapshot.group_catalog(["SubhaloLenType",
                                         "SubhaloPos",
                                         "SubhaloIDMostbound"])
            dfpart = pd.DataFrame(
                    {'id_mostbound' : (self.snapshot.cat['SubhaloIDMostbound']).astype(np.int64),
                     'X' : (self.snapshot.cat['SubhaloPos'][:, 0]).astype(np.float64),
                     'Y' : (self.snapshot.cat['SubhaloPos'][:, 1]).astype(np.float64),
                     'Z' : (self.snapshot.cat['SubhaloPos'][:, 2]).astype(np.float64),
                     'n_part' : (self.snapshot.cat["SubhaloLenType"][:, 1]).astype(np.int64)}
                    )
            print(':Original data-set contains %d subhalos' % (dfpart.shape[0]))
            subhalo_offset = (np.cumsum(dfpart['n_part'].values) - \
                              dfpart['n_part'].values).astype(int)
            dfpart['offset'] = pd.Series(subhalo_offset, index=dfpart.index, dtype=int)
            self.snapshot.read(["Coordinates"], parttype=[1])
            del subhalo_offset, self.snapshot.cat

            ## Read with respect to the merger-tree re-ordered SubFind file
            hdf = h5py.File(self.sh_reor_file, 'r')
            self.df = pd.DataFrame({'snapnum' : hdf['Subhalo']['SnapNum'][:],
                                    'mass_total' : hdf['Subhalo']['SubhaloMass'][:],
                                    'halfmassrad' : hdf['Subhalo']['SubhaloHalfmassRad'][:],
                                    'sigma' : hdf['Subhalo']['SubhaloVelDisp'][:],
                                    'nodeIndex' : hdf['Subhalo']['nodeIndex'][:],
                                    'id_mostbound' : hdf['Subhalo']['SubhaloIDMostbound'][:]})
            _indx = self.df[self.df['snapnum'] == snapnum].index
            self.df = self.df[self.df['snapnum'] == snapnum]
            self.df.index = range(len(self.df.index))

            spin = hdf['Subhalo']['SubhaloSpin'][:]
            spin = spin[_indx, :]
            spin = [np.sqrt((spin[ii, 0]**2 + spin[ii, 1]**2 + spin[ii, 2]**2)/3) for ii in range(len(spin))]
            self.df['spin'] = pd.Series(spin, index=self.df.index, dtype=float)
            
            ## Filter
            _indx = self.match_halos(self.df['id_mostbound'].values, dfpart['id_mostbound'].values)
            self.df = self.df.iloc[_indx]
            self.df.index = range(len(self.df.index))

            _indx = self.match_halos(dfpart['id_mostbound'].values, self.df['id_mostbound'].values)
            dfpart = dfpart.iloc[_indx]
            dfpart.index = range(len(dfpart.index))

            # Second Hand Data
            self.principal_axis(dfpart)
            self.progenitors(snapnum-nepoch, snapnum)
            print(':Final data-set contains %d subhalos' % (self.df.shape[0]))

        elif (self.simulation == 'christian_fp'):
            # First Hand Data
            ## Snapshot particle related data
            self.snapshot.group_catalog(["SubhaloLenType",
                                         "SubhaloPos",
                                         "SubhaloIDMostbound"])
            dfpart = pd.DataFrame(
                    {'id_mostbound' : (self.snapshot.cat['SubhaloIDMostbound']).astype(np.int64),
                     'X' : (self.snapshot.cat['SubhaloPos'][:, 0]).astype(np.float64),
                     'Y' : (self.snapshot.cat['SubhaloPos'][:, 1]).astype(np.float64),
                     'Z' : (self.snapshot.cat['SubhaloPos'][:, 2]).astype(np.float64),
                     'n_part' : (self.snapshot.cat["SubhaloLenType"][:, 1]).astype(np.int64)}
                    )
            print(':Original data-set contains %d subhalos' % (dfpart.shape[0]))
            subhalo_offset = (np.cumsum(dfpart['n_part'].values) - \
                              dfpart['n_part'].values).astype(int)
            dfpart['offset'] = pd.Series(subhalo_offset, index=dfpart.index, dtype=int)
            self.snapshot.read(["Coordinates"], parttype=[1])
            del subhalo_offset, self.snapshot.cat

            ## Read with respect to the merger-tree re-ordered SubFind file
            hdf = h5py.File(self.sh_reor_file, 'r')
            self.df = pd.DataFrame({'snapnum' : hdf['Subhalo']['SnapNum'][:],
                                    'mass_total' : hdf['Subhalo']['SubhaloMass'][:],
                                    'sfr' : hdf['Subhalo']['SubhaloSFR'][:],
                                    'halfmassrad' : hdf['Subhalo']['SubhaloHalfmassRadType'][:, 4],
                                    'sigma' : hdf['Subhalo']['SubhaloVelDisp'][:],
                                    'nodeIndex' : hdf['Subhalo']['nodeIndex'][:],
                                    'id_mostbound' : hdf['Subhalo']['SubhaloIDMostbound'][:]})
            _indx = self.df[self.df['snapnum'] == snapnum].index
            self.df = self.df[self.df['snapnum'] == snapnum]
            self.df.index = range(len(self.df.index))

            print('::::: SFR:', self.df['sfr'])

            spin = hdf['Subhalo']['SubhaloSpin'][:]
            spin = spin[_indx, :]
            spin = [np.sqrt((spin[ii, 0]**2 + spin[ii, 1]**2 + spin[ii, 2]**2)/3) for ii in range(len(spin))]
            self.df['spin'] = pd.Series(spin, index=self.df.index, dtype=float)
            
            ## Filter
            _indx = self.match_halos(self.df['id_mostbound'].values, dfpart['id_mostbound'].values)
            self.df = self.df.iloc[_indx]
            self.df.index = range(len(self.df.index))

            _indx = self.match_halos(dfpart['id_mostbound'].values, self.df['id_mostbound'].values)
            dfpart = dfpart.iloc[_indx]
            dfpart.index = range(len(dfpart.index))

            # Second Hand Data
            self.principal_axis(dfpart)
            self.progenitors(snapnum-nepoch, snapnum)
            print(':Final data-set contains %d subhalos' % (self.df.shape[0]))
        
        elif (self.simulation == 'EAGLE'):
            # First Hand Data
            ## Snapshot particle related data
            #self.snapshot.group_catalog(["SubLengthType",
            #                             "CentreOfMass",
            #                             "IDMostBound"])
            #dfpart = pd.DataFrame(
            #        {'id_mostbound' : (self.snapshot.cat['IDMostBound']).astype(np.int64),
            #         'X' : (self.snapshot.cat['CentreOfMass'][:, 0]).astype(np.float64),
            #         'Y' : (self.snapshot.cat['CentreOfMass'][:, 1]).astype(np.float64),
            #         'Z' : (self.snapshot.cat['CentreOfMass'][:, 2]).astype(np.float64),
            #         'n_part' : (self.snapshot.cat["SubLengthType"][:, 1]).astype(np.int64)}
            #        )
            #print(':Original data-set contains %d subhalos' % (dfpart.shape[0]))
            #subhalo_offset = (np.cumsum(dfpart['n_part'].values) - \
            #                  dfpart['n_part'].values).astype(int)
            #dfpart['offset'] = pd.Series(subhalo_offset, index=dfpart.index, dtype=int)
            #self.snapshot.read(["Coordinates"], parttype=[1])
            #del subhalo_offset
            
            ## Read with respect to the merger-tree re-ordered SubFind file
            hdf = h5py.File(self.sh_file, 'r')
            self.df = pd.DataFrame(
                    {'snapnum' : hdf['Subhalo']['SnapNum'][:],
                     'mass_total' : hdf['Subhalo']['Mass'][:],
                     'halfmassrad' : hdf['Subhalo']['HalfMassRad_Star'][:],
                     'sigma' : hdf['Subhalo']['StellarVelDisp'][:],
                     'DhaloIndex' : hdf['Subhalo']['DhaloIndex'][:],
                     'DescendantID' : hdf['MergerTree']['DescendantID'][:],
                     'HaloID' : hdf['MergerTree']['HaloID'][:],
                     'LastProgID' : hdf['MergerTree']['LastProgID'][:],
                     'TopLeafID' : hdf['MergerTree']['TopLeafID'][:]}
                    )
           

            self.df = self.df[self.df['snapnum'] == snapnum]
            self.df.index = range(len(self.df.index))
            print(':Original data-set contains %d subhalos' % (self.df.shape[0]))
            
            #self.dfmt = pd.DataFrame(
            #        {'snapnum' : hdf['MergerTree']['DescendantID'][:],
            #         'DescendantID' : hdf['MergerTree']['DescendantID'][:],
            #         'HaloID' : hdf['MergerTree']['HaloID'][:],
            #         'LastProgID' : hdf['MergerTree']['LastProgID'][:]}
            #        )
            #self.df = self.df[(self.df['snapnum'] >= snapnum-nepoch) & \
            #                 (self.df['snapnum'] <= snapnum)]
            #self.df.index = range(len(self.df.index))
            
            self.progenitors(snapnum-nepoch, snapnum, self.simulation)
        
            print(':Final data-set contains %d subhalos' % (self.df.shape[0]))
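As a side note, the per-row spin magnitude computed with a list comprehension above can be written as a single vectorized expression (equivalent for an (N, 3) spin array):

spin = np.sqrt(np.sum(spin**2, axis=1) / 3)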
Example No. 10
def lensing_signal():
    # Get command line arguments
    args = {}
    args["simdir"] = sys.argv[1]
    args["snapnum"] = int(sys.argv[2])
    args["ladir"] = sys.argv[3]
    args["rksdir"] = sys.argv[4]
    args["outbase"] = sys.argv[5]
    args["radius"] = sys.argv[6]
    #args["zs"]      = sys.argv[7]

    snapfile = args["simdir"] + 'snapdir_%03d/snap_%03d'
    # Units of Simulation
    scale = rf.simulation_units(args["simdir"])

    # Cosmological Parameters
    s = read_hdf5.snapshot(args["snapnum"], args["simdir"])
    cosmo = LambdaCDM(H0=s.header.hubble * 100,
                      Om0=s.header.omega_m,
                      Ode0=s.header.omega_l)
    h = s.header.hubble
    zl = s.header.redshift
    print('Analyse sub-&halo at z=%f' % zl)

    Lens = {"HFID": [], "Vrms": [], "FOV": [], "ELIP": [], "PA": []}
    # Run through LensingMap output files
    for lm_file in glob.glob(args["ladir"] + "*" + "409.pickle"):
        # Load LensingMap Contents
        LM = pickle.load(open(lm_file, 'rb'))
        print('Processing the following file: \n %s' % (lm_file))
        print('which contains %d lenses' % len(LM['HF_ID'][:]))
        print('with max. einst. radius: %f' % np.max(LM['Sources']['Rein'][:]))
        label = args["simdir"].split('/')[-2].split('_')[-2]

        snapnum = LM['snapnum']
        dfh = pd.read_csv(args["rksdir"] + 'halos_%d.dat' % snapnum,
                          sep='\s+',
                          skiprows=np.arange(1, 16))

        print(LM['FOV'])
        # Run through lenses
        for ll in range(len(LM['HF_ID'])):
            HFID = int(LM['HF_ID'][ll])
            FOV = int(LM['FOV'][ll])  #[arcsec]

            # Load Halo Properties
            indx = dfh['#ID'][dfh['#ID'] == HFID].index[0]
            HPos = [dfh['X'][indx], dfh['Y'][indx], dfh['Z'][indx]]
            Vrms = dfh['Vrms'][indx]  #[km/s]
            hvel = pd.concat([dfh['VX'], dfh['VY'], dfh['VZ']],
                             axis=1).loc[[indx]].values
            epveca = pd.concat([dfh['A[x]'], dfh['A[y]'], dfh['A[z]']],
                               axis=1).loc[[indx]].values
            epvecb = pd.concat([dfh['B[x]'], dfh['B[y]'], dfh['B[z]']],
                               axis=1).loc[[indx]].values
            epvecc = pd.concat([dfh['C[x]'], dfh['C[y]'], dfh['C[z]']],
                               axis=1).loc[[indx]].values

            # Ellipticity (should be between 0.0-0.8)
            epval = [
                np.linalg.norm(vec)
                for vec in np.vstack((epveca, epvecb, epvecc))
            ]
            print('epval', epval)
            indx = np.argsort(epval)
            a = epval[indx[2]]  # major
            b = epval[indx[1]]  # intermediate
            c = epval[indx[0]]  # minor
            ellipticity = (a - b) / (2 * (a + b + c))
            print(a, b, c)
            print('ellipticity', ellipticity)

            # Position-Angle [rad]
            # Based on Stark 1977; Binggeli 1980
            """
            [vu, phi] = Euler_angles(sub_av, sub_bv, sub_cv)
            j = e1**2*e2**2*np.sin(vu)**2 + \
                e1**2*np.cos(vu)*np.cos(phi)**2 + \
                e2**2*np.cos(vu)**2*np.sin(phi)**2
            l = e1**2*np.sin(phi)**2 + e2**2*np.cos(phi)**2
            k = (e1**2 - e2**2)*np.sin(phi)*np.cos(phi)*np.cos(vu)
            ep = np.sqrt((j + l + np.sqrt((j - l)**2 + 4*k**2)) /
                         (j + l - np.sqrt((j - l)**2 + 4*k**2)))
            # Observed ellipticity
            e = 1 - 1/ep
            f = e1**2*np.sin(vu)**2*np.sin(phi)**2 +
                e2**2*np.sin(vu)**2*np.cos(phi)**2 + np.cos(vu)**2
            """
            PA = 0.

            Lens['HFID'].append(HFID)
            Lens['FOV'].append(FOV)
            Lens['Vrms'].append(Vrms)
            Lens['ELIP'].append(ellipticity)
            Lens['PA'].append(PA)
        df = pd.DataFrame.from_dict(Lens)
        zllabel = str(zl).replace('.', '')[:3].zfill(3)
        fname = (args["outbase"]+'LPPBox_%s_zl%s.txt' % \
                (label, zllabel))
        df.to_csv(fname, header=True, index=None, sep=' ', mode='w')
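A quick numeric check of the ellipticity definition used above, with made-up axis lengths:

a, b, c = 2.0, 1.0, 0.5                      # made-up axis lengths, major to minor
ellipticity = (a - b) / (2 * (a + b + c))    # = 1/7 ~ 0.143, inside the quoted 0.0-0.8 range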
Example No. 11
def lensing_signal():
    # Get command line arguments
    args = {}
    args["snapnum"] = int(sys.argv[1])
    args["simdir"] = sys.argv[2]
    args["hfname"] = sys.argv[3]
    args["dmdir"] = sys.argv[4]
    args["ncells"] = int(sys.argv[5])
    args["outbase"] = sys.argv[6]
    args["lenses"] = int(sys.argv[7])

    # Organize division of sub-&halos over processes on proc. 0
    s = read_hdf5.snapshot(args["snapnum"], args["simdir"])
    unitlength = lt.define_unit(s.header.unitlength, args["hfname"])

    fname = glob.glob(args["dmdir"] + '*.h5')
    dmfile = []
    for ff in range(len(fname)):
        #if (os.path.getsize(fname[ff])/(1024*1024.0)) < 1:
        #    fname[ff] = 0
        #else:
        dmfile.append(fname[ff])
    dmfile = np.asarray(dmfile)

    # Cosmological Parameters
    cosmo = LambdaCDM(H0=s.header.hubble * 100,
                      Om0=s.header.omega_m,
                      Ode0=s.header.omega_l)
    redshift = s.header.redshift
    print('Analyse sub-&halo at z=%f' % redshift)

    # Calculate critical surface density
    zl = redshift
    zs = 0.409
    sigma_cr = lt.sigma_crit(zl, zs, cosmo).to_value('Msun %s-2' % unitlength)

    lenslistinit()
    srclistinit()
    # Run through files
    for ff in range(len(dmfile)):
        print('\n')
        print('------------- \n Reading File: %s' %
              (fname[ff].split('/')[-2:]))
        dmf = h5py.File(dmfile[ff], 'r')
        print('Nr. of galaxies:', len(dmf['HFID']))

        # Run through lenses
        for ll in range(len(dmf['HFID'])):
            # convert. box size and pixels size from ang. diam. dist. to arcsec
            FOV_arc = (dmf['FOV'][ll]/cf.Da(zl, unitlength, cosmo) * \
                       u.rad).to_value('arcsec')
            dsx_arc = FOV_arc / args["ncells"]  #[arcsec] pixel size
            # initialize the coordinates of grids (light rays on lens plan)
            lp1, lp2, lpv = cf.make_r_coor(FOV_arc, args["ncells"])

            # Calculate convergence map
            kappa = dmf['DMAP'][ll] / sigma_cr
            fig = plt.figure()
            ax = fig.add_subplot(111)

            # Calculate Deflection Maps
            alpha1, alpha2, mu_map, phi, detA, lambda_t, lpv = lt.cal_lensing_signals(
                kappa, FOV_arc, args["ncells"], lpv)
            lp2, lp1 = np.meshgrid(lpv, lpv)  #[arcsec] finer resol. in centre
            # Mapping light rays from image plane to source plan
            [sp1, sp2] = [lp1 - alpha1, lp2 - alpha2]  #[arcsec]

            # Calculate Einstein Radii
            Ncrit, curve_crit_tan, caustic, Rein = lt.einstein_radii(
                lp1, lp2, sp1, sp2, detA, lambda_t, cosmo, ax, 'med')
            # Calculate Time-Delay and Magnification
            beta = np.array([0., 0.])
            n_imgs, delta_t, mu, theta = lt.timedelay_magnification(
                mu_map, phi, dsx_arc, args["ncells"], lp1, lp2, alpha1, alpha2,
                beta, zs, zl, cosmo)
            if args["lenses"] == True:
                if n_imgs > 1:
                    #TODO: does n_imgs include the original
                    print('Galaxy %d/%d got %d multiple lensed images' % \
                            (ll, len(dmf['HFID']), n_imgs))
                    # Tree Branch 1
                    l_HFID.append(int(dmf['HFID'][ll]))
                    l_fov.append(FOV_arc)
                    # Tree Branch 2
                    l_srcbeta.append(beta)
                    l_tancritcurves.append(curve_crit_tan)
                    l_caustic.append(caustic)
                    l_einsteinradius.append(Rein)
                    # Tree Branch 3
                    l_srctheta.append(theta)
                    l_deltat.append(delta_t)
                    l_mu.append(mu)
            elif args["lenses"] == False:
                if n_imgs == 1:
                    l_HFID.append(int(dmf['HFID'][ll]))
                    l_fov.append(FOV_arc)
                    l_srcbeta.append(beta)
                    l_tancritcurves.append(curve_crit_tan)
                    l_caustic.append(caustic)
                    l_einsteinradius.append(Rein)
                    l_srctheta.append(theta)
                    l_deltat.append(delta_t)
                    l_mu.append(mu)

    ########## Save to File ########
    if args["lenses"] == True:
        print('%d galaxies produce multiple imaged SN Ia' % (len(l_HFID)))
        tree = plant_Tree()
        # Tree Branches of Node 1 : Lenses
        tree['HF_ID'] = l_HFID
        tree['snapnum'] = args["snapnum"]
        tree['zl'] = redshift
        tree['zs'] = zs
        tree['FOV'] = l_fov  #[arcsec] for glafic
        # Tree Branches of Node 1 : Sources
        tree['Sources']['beta'] = l_srcbeta
        tree['Sources']['CAU'] = l_caustic
        tree['Sources']['TCC'] = l_tancritcurves
        tree['Sources']['Rein'] = l_einsteinradius
        for imgs in range(len(l_mu)):
            # Tree Branches of Node 2 : Multiple Images
            tree['Sources']['theta'][imgs] = l_srctheta[imgs]
            tree['Sources']['delta_t'][imgs] = l_deltat[imgs]
            tree['Sources']['mu'][imgs] = l_mu[imgs]
        simlabel = args["simdir"].split('/')[-2].split('_')[2]
        zllabel = str(redshift).replace('.', '')[:3].zfill(3)
        zslabel = '{:<03d}'.format(int(str(zs).replace('.', '')))
        filename = args["outbase"]+'LM_%s_%s_zl%szs%s.pickle' % \
                (simlabel, 'lens', zllabel, zslabel)
        filed = open(filename, 'wb')
        pickle.dump(tree, filed)
        filed.close()
        plt.close(fig)
    elif args["lenses"] == False:
        print('%d galaxies produce single imaged SN Ia' % (len(l_HFID)))
        dicts = {}
        dicts['HF_ID'] = l_HFID
        dicts['snapnum'] = args["snapnum"]
        dicts['zl'] = redshift
        dicts['zs'] = zs
        dicts['FOV'] = l_fov  #[arcsec] for glafic
        dicts['beta'] = l_srcbeta
        dicts['CAU'] = l_caustic
        dicts['TCC'] = l_tancritcurves
        dicts['Rein'] = l_einsteinradius
        dicts['theta'] = l_srctheta
        dicts['delta_t'] = l_deltat
        dicts['mu'] = l_mu
        df = pd.DataFrame(data=dicts)
        simlabel = args["simdir"].split('/')[-2].split('_')[2]
        zllabel = str(redshift).replace('.', '')[:3].zfill(3)
        zslabel = '{:<03d}'.format(int(str(zs).replace('.', '')))
        filename = args["outbase"]+'LM_%s_%s_zl%szs%s.h5' % \
                (simlabel, 'nonlens', zllabel, zslabel)
        df.to_hdf(filename, key='nonlenses')
        plt.close(fig)
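lt.sigma_crit is a project helper whose internals are not shown here; for reference, a standard way to compute the critical surface density Sigma_cr = c^2 D_s / (4 pi G D_l D_ls) with astropy (a sketch under that formula, not necessarily identical to lt.sigma_crit):

import numpy as np
from astropy import constants as const
from astropy import units as u

def sigma_crit_standard(zl, zs, cosmo):
    """Critical surface density for a lens at zl and source at zs."""
    Dl = cosmo.angular_diameter_distance(zl)
    Ds = cosmo.angular_diameter_distance(zs)
    Dls = cosmo.angular_diameter_distance_z1z2(zl, zs)
    sigma_cr = const.c**2 / (4 * np.pi * const.G) * Ds / (Dl * Dls)
    return sigma_cr.to(u.Msun / u.Mpc**2)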
Example No. 12
snapnum = 45

###############################################################################
# Subfind Results
lc_dir = '/cosma5/data/dp004/dc-beck3/LightCone/full_physics/'
hf_name = 'Subfind'
lc_file = lc_dir+hf_name+'/LC_SN_L62_N512_GR_kpc_rndseed1.h5'
LC = rf.LightCone_with_SN_lens(lc_file, hf_name)
print('length', len(LC['Rhalfmass']))

###############################################################################
# Subfind
hfdir = '/cosma6/data/dp004/dc-arno1/SZ_project/full_physics/L62_N512_GR_kpc/'
blockprint()
s = read_hdf5.snapshot(snapnum, hfdir)
s.group_catalog(["SubhaloVelDisp", "SubhaloHalfmassRad",
                 "SubhaloMass", "SubhaloPos", "GroupPos", "GroupMass"])
enableprint()
Subfind = {'Pos': s.cat["SubhaloPos"] * s.header.hubble * 1e-3,
           'Mass': s.cat["SubhaloMass"],
           'Vrms': s.cat["SubhaloVelDisp"]}

###############################################################################
# Rockstar
hfdir = ('/cosma5/data/dp004/dc-beck3/rockstar/full_physics/L62_N512_GR_kpc/' + \
         'halos_%d.dat' % snapnum)
data = open(hfdir, 'r')
data = data.readlines()
subpos = []
subMvir = []
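The snippet above stops after reading the raw lines; a sketch of how the position and mass lists might be filled, assuming the same Rockstar column layout used by the pandas readers in the other examples (16 header lines, Mvir in column 2, X/Y/Z in columns 9-11):

for line in data[16:]:
    cols = line.split()
    subMvir.append(float(cols[2]))
    subpos.append([float(cols[9]), float(cols[10]), float(cols[11])])
subpos = np.asarray(subpos)
subMvir = np.asarray(subMvir)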
Example No. 13
    LCSettings)

################################################################################
# Run through simulations
for sim in range(len(sim_dir)):
    # File for lensing-maps
    lm_dir = HQ_dir + '/LensingMap/' + sim_phy[sim] + hfname + '/' + sim_name[
        sim] + '/'
    # Simulation Snapshots
    snapfile = sim_dir[sim] + 'snapdir_%03d/snap_%03d'

    # Units of Simulation
    scale = rf.simulation_units(sim_dir[sim])

    # Cosmological Parameters
    s = read_hdf5.snapshot(45, sim_dir[sim])
    cosmo = LambdaCDM(H0=s.header.hubble * 100,
                      Om0=s.header.omega_m,
                      Ode0=s.header.omega_l)
    h = s.header.hubble

    HF_ID = []
    HaloLCID = []
    SrcID = []
    Mdyn = []
    Mlens = []
    # Run through LensingMap output files
    #for lm_file in glob.glob(lm_dir+'LM_1_Proc_*_0.pickle'):
    for lm_file in glob.glob(lm_dir + 'LM_new_GR.pickle'):
        # Load LensingMap Contents
        LM = pickle.load(open(lm_file, 'rb'))
Example No. 14
def lensing_signal():
    # Get command line arguments
    args = {}
    #args["snapnum"]      = int(sys.argv[1])
    #args["ncells"]       = int(sys.argv[2])
    #args["simdir"]       = sys.argv[3]
    #args["dmdir"]        = sys.argv[4]
    #args["outbase"]      = sys.argv[5]
    
    args["snapnum"]      = 40
    args["ncells"]       = 1024
    args["simdir"]       = "/cosma6/data/dp004/dc-arno1/SZ_project/full_physics/L62_N512_GR_kpc/"
    args["dmdir"]        = "/cosma5/data/dp004/dc-beck3/StrongLensing/DensityMap/full_physics/L62_N512_GR_kpc/z_40/"
    args["outbase"]      = "/cosma5/data/dp004/dc-beck3/StrongLensing/LensingMap/full_physics/Rockstar/L62_N512_GR_kpc/Box/"
   
    # Organize division of sub-&halos over processes on proc. 0
    s = read_hdf5.snapshot(args["snapnum"], args["simdir"])
    fname = glob.glob(args["dmdir"]+'*.h5')
    ffname = []
    for ff in range(len(fname)):
        if (os.path.getsize(fname[ff])/(1024*1024.0)) < 1:
            fname[ff] = 0
        else:
            ffname.append(fname[ff])
    ffname = np.asarray(ffname)
    print(ffname)

    # Cosmological Parameters
    cosmo = LambdaCDM(H0=s.header.hubble*100,
                      Om0=s.header.omega_m,
                      Ode0=s.header.omega_l)
    redshift = s.header.redshift

    # Calculate critical surface density
    zl = redshift
    zs = 2.
    sigma_cr = sigma_crit(zl, zs, cosmo).to_value('Msun Mpc-2')
    
    lenslistinit(); srclistinit()
    # Run through files
    for ff in range(len(ffname))[:]:
        print('Reading File: %s' % (fname[ff].split('/')[-2:]))
        dmf = h5py.File(ffname[ff], 'r')
    
        print(len(dmf['subhalo_id'][:]))
        # Run through lenses
        for ll in range(len(dmf['subhalo_id']))[1:]:
            print('Works on lens %d with ID %d' % \
                    (ll, dmf['subhalo_id'][ll]))
            # convert. box size and pixels size from ang. diam. dist. to arcsec
            FOV_arc = (dmf['fov_width'][ll]/cf.Da(zl, cosmo)*u.rad).to_value('arcsec')  #[arcsec] box size
            dsx_arc = FOV_arc/args["ncells"]  #[arcsec] pixel size
            # initialize the coordinates of grids (light rays on lens plan)
            #lp1, lp2 = cf.make_r_coor(FOV_arc, args["ncells"])  #[arcsec]
            lpv = np.linspace(-(FOV_arc-dsx_arc)/2, (FOV_arc-dsx_arc)/2, args["ncells"])
            lp1, lp2 = np.meshgrid(lpv, lpv)  #[arcsec]

            # Calculate convergence map
            kappa = dmf['density_map'][ll]/sigma_cr
            #print('The Kappa place has a max of %f and min of %f' % 
            #        (np.max(kappa), np.min(kappa)))
            fig = plt.figure()
            ax = fig.add_subplot(111)
            
            # Calculate Deflection Maps
            alpha1, alpha2, mu_map, phi, detA, lambda_t = cal_lensing_signals(kappa,
                                                                              FOV_arc,
                                                                              args["ncells"]) 
            # Calculate Einstein Radii
            print('finish cal_lensing_signals')
            Ncrit, curve_crit, curve_crit_tan, Rein = einstein_radii(lp1, lp2,
                                                                     detA,
                                                                     lambda_t,
                                                                     zl, cosmo,
                                                                     ax, 'med')
            print('finish einstein_radii')
            # Calculate Time-Delay and Magnification
            snia_pos = np.array([0, 0, 0])
            n_imgs, delta_t, mu, theta, beta = timedelay_magnification(
                    mu_map, phi, dsx_arc, args["ncells"],
                    lp1, lp2, alpha1, alpha2, snia_pos, zs, zl, cosmo)
            print('finish timedelay_magnification')
            if n_imgs > 1:
                print(dmf['subhalo_id'][ll])
                print('11111')
                l_HFID.append(int(dmf['subhalo_id'][ll]))
                print('22222')
                print('timedelay_magnification', n_imgs)
                # Tree Branch 1
                #l_HFID.append(int(dmf['subhalo_id'][ll]))
                # Tree Branch 2
                l_srcbeta.append(beta)
                l_tancritcurves.append(curve_crit_tan)
                l_einsteinradius.append(Rein)
                # Tree Branch 3
                l_srctheta.append(theta)
                l_deltat.append(delta_t)
                l_mu.append(mu)

    
    ########## Save to File ########
    print('Plant tree of %d lenses' % (len(l_HFID)))
    tree = plant_Tree()
    # Tree Branches of Node 1 : Lenses
    tree['HF_ID'] = l_HFID
    tree['snapnum'] = args["snapnum"]
    tree['zl'] = redshift
    tree['zs'] = 2.
    # Tree Branches of Node 1 : Sources
    tree['Sources']['beta'] = l_srcbeta
    tree['Sources']['TCC'] = l_tancritcurves
    tree['Sources']['Rein'] = l_einsteinradius
    for imgs in range(len(l_mu)):
        # Tree Branches of Node 2 : Multiple Images
        tree['Sources']['theta'][imgs] = l_srctheta[imgs]
        tree['Sources']['delta_t'][imgs] = l_deltat[imgs]
        tree['Sources']['mu'][imgs] = l_mu[imgs]
    print('tree created')
    label = args["simdir"].split('/')[-2].split('_')[2]
    filename = args["outbase"]+'LM_%s_z%d.pickle' % (label, args["snapnum"])
    filed = open(filename, 'wb')
    pickle.dump(tree, filed)
    filed.close()
    plt.close(fig)
    print('plt close')
Example No. 15
def subhalo_data(hfdir, hfname, snapnum, h, unit):
    """
    Input:
        hfdir: halo finder output directory
        hfname: halo finder name
        snapnum: snapshot number
        h: hubble parameter
        unit: length units of halo positions in simulation
    """
    exp = np.floor(np.log10(np.abs(unit))).astype(int)

    if hfname == 'Rockstar':
        # [X, Y, Z] in [Mpc]
        hffile = hfdir + 'halos_%d.dat' % snapnum
        df = pd.read_csv(hffile,
                         sep='\s+',
                         skiprows=16,
                         usecols=[0, 2, 4, 9, 10, 11],
                         names=['ID', 'Mvir', 'Vrms', 'X', 'Y', 'Z'])
        df = df[df['Mvir'] > 3e11]
        if exp == 23:  #[Mpc]
            pass
        elif exp == 21:  #[kpc]
            df.loc[:, 'X'] *= 1e3 / h
            df.loc[:, 'Y'] *= 1e3 / h
            df.loc[:, 'Z'] *= 1e3 / h
        else:
            raise Exception('This unit cannot be converted for Rockstar')

    elif hfname == 'Subfind':
        s = read_hdf5.snapshot(snapnum, hfdir)
        s.group_catalog([
            "SubhaloIDMostbound", "SubhaloPos", "SubhaloMass",
            "SubhaloVelDisp", "GroupFirstSub"
        ])
        indx = s.cat['GroupFirstSub'].astype(int)
        df = pd.DataFrame({
            'ID': s.cat['SubhaloIDMostbound'][indx],
            'Vrms': s.cat['SubhaloVelDisp'][indx],
            'X': s.cat['SubhaloPos'][indx, 0],
            'Y': s.cat['SubhaloPos'][indx, 1],
            'Z': s.cat['SubhaloPos'][indx, 2],
            'Mass': s.cat['SubhaloMass'][indx]
        })
        df = df[df['Mass'] > 5e11]
        if exp == 21:  #simulation in [kpc]
            pass
        elif exp == 23:  #simulation in [Mpc]
            df.loc[:, 'X'] *= 1e-3 * h
            df.loc[:, 'Y'] *= 1e-3 * h
            df.loc[:, 'Z'] *= 1e-3 * h
        else:
            raise Exception('This unit cannot be converted for Subfind')

    SH = {
        'ID': df['ID'].values.astype('float64'),  #for MPI.DOUBLE datatype
        'Vrms': df['Vrms'].values.astype('float64'),
        'X': df['X'].values.astype('float64'),
        'Y': df['Y'].values.astype('float64'),
        'Z': df['Z'].values.astype('float64')
    }
    del df
    return SH
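A minimal usage sketch (directory, Hubble parameter, and unit value below are placeholders; the unit argument is whatever rf.simulation_units() returns in the other examples):

SH = subhalo_data('/path/to/halo_finder/', 'Subfind', 45, 0.6774, 3.0857e21)
print('%d subhalos selected' % len(SH['ID']))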
Example No. 16
def lensing_signal():
    # Get command line arguments
    args = {}
    args["snapnum"] = int(sys.argv[1])
    args["simdir"] = sys.argv[2]
    args["ladir"] = sys.argv[3]
    args["hfname"] = sys.argv[4]
    args["hfdir"] = sys.argv[5]
    args["outbase"] = sys.argv[6]
    args["radius"] = sys.argv[7]
    args["lenses"] = int(sys.argv[8])
    snapfile = args["simdir"] + 'snapdir_%03d/snap_%03d'
    label = args["simdir"].split('/')[-2].split('_')[-2]

    # Units of Simulation
    scale = rf.simulation_units(args["simdir"])

    # Cosmological Parameters
    s = read_hdf5.snapshot(args["snapnum"], args["simdir"])
    cosmo = LambdaCDM(H0=s.header.hubble * 100,
                      Om0=s.header.omega_m,
                      Ode0=s.header.omega_l)
    h = s.header.hubble

    # Stellar Data
    stars = load.load_stars(args["snapnum"], args["simdir"])
    dm = load.load_dm(args["snapnum"], args["simdir"])
    gas = load.load_gas(args["snapnum"], args["simdir"])

    indxdrop = []  # collect indices of subhalos failing the selection criteria
    if args["lenses"] == 1:
        lafile = glob.glob(args["ladir"] + "*" + "_lens_" + "*" +
                           "409.pickle")[0]
        lenses = load.load_subhalos(args["snapnum"],
                                    args["simdir"],
                                    lafile,
                                    strong_lensing=1)
        # Run through lenses
        for ll in range(len(lenses.index.values)):
            print('Lenses: %f' % (ll / len(lenses.index.values)))
            lens = lenses.iloc[ll]

            if isinstance(lens, (pd.core.series.Series)):
                indx = load.select_particles(stars['Pos'], lens['Pos'],
                                             lens['Rstellarhalfmass'] * 1.5,
                                             'sphere')
                halo_stars = {
                    'Pos': stars['Pos'][indx, :],
                    'Vel': stars['Vel'][indx, :],
                    'Mass': stars['Mass'][indx]
                }
                halo_stars['Pos'] -= lens['Pos']

                indx = load.select_particles(dm['Pos'], lens['Pos'],
                                             lens['Rein'], 'sphere')
                halo_dm = {'Pos': dm['Pos'][indx, :], 'Mass': dm['Mass'][indx]}
                halo_dm['Pos'] -= lens['Pos']

                indx = load.select_particles(gas['Pos'], lens['Pos'],
                                             lens['Rein'], 'sphere')
                halo_gas = {
                    'Pos': gas['Pos'][indx, :],
                    'Mass': gas['Mass'][indx]
                }
                halo_gas['Pos'] -= lens['Pos']

                lenses, indxdrop = load.add_properties(halo_stars, halo_dm,
                                                       halo_gas, lens, lenses,
                                                       cosmo, s, indxdrop, ll,
                                                       args["lenses"])
            else:
                print('!!!!!!! SOS !!!!!!!!!')

        lenses = lenses.drop(indxdrop)
        print('Saving %d lenses to .hdf5' % (len(lenses.index.values)))
        zllabel = str(lens['ZL']).replace('.', '')[:3].zfill(3)
        zslabel = '{:<03d}'.format(
            int(str(lenses['ZS'].values[0]).replace('.', '')))
        fname = (args["outbase"]+'LPPBox_%s_%s_zl%szs%s.h5' % \
                (label, 'lens', zllabel, zslabel))
        lenses.to_hdf(fname, key='lenses', mode='w')

    if args["lenses"] == 0:
        print(args["ladir"] + "*" + "_nonlens_" + "*" + "409.h5")
        print(glob.glob(args["ladir"] + "*" + "_nonlens_" + "*" + "409.h5"))
        lafile = glob.glob(args["ladir"] + "*" + "_nonlens_" + "*" +
                           "409.h5")[0]
        subhalos = load.load_subhalos(args["snapnum"],
                                      args["simdir"],
                                      lafile,
                                      strong_lensing=0)
        # Run through subhalos
        for ll in range(len(subhalos.index.values)):
            print('Non-Lenses: %f' % (ll / len(subhalos.index.values)))
            subhalo = subhalos.iloc[ll]

            if isinstance(subhalo, (pd.core.series.Series)):
                indx = load.select_particles(stars['Pos'], subhalo['Pos'],
                                             subhalo['Rstellarhalfmass'] * 1.5,
                                             'sphere')
                halo_stars = {
                    'Pos': stars['Pos'][indx, :],
                    'Vel': stars['Vel'][indx, :],
                    'Mass': stars['Mass'][indx]
                }
                halo_stars['Pos'] -= subhalo['Pos']

                #indx = load.select_particles(
                #        dm['Pos'], subhalo['Pos'],
                #        subhalo['Rstellarhalfmass']*1.5,
                #        'sphere')
                #halo_dm = {'Pos' : dm['Pos'][indx, :],
                #           'Vel' : dm['Vel'][indx, :],
                #           'Mass' : dm['Mass'][indx]}
                #halo_dm['Pos'] -= subhalo['Pos']

                subhalos, indxdrop = load.add_properties(
                    halo_stars, subhalo, subhalos, cosmo, s, indxdrop, ll,
                    args["lenses"])
            else:
                print('!!!!!!! SOS !!!!!!!!!')

        subhalos = subhalos.drop(indxdrop)
        print('Saving %d Subhalos to .hdf5' % (len(subhalos.index.values)))
        # assumes a 'ZL' column, analogous to the lens branch above
        zllabel = str(subhalos['ZL'].values[0]).replace('.', '')[:3].zfill(3)
        zslabel = '{:<03d}'.format(
            int(str(subhalos['ZS'].values[0]).replace('.', '')))
        fname = (args["outbase"]+'LPPBox_%s_%s_zl%szs%s.h5' % \
                (label, 'nonlens', zllabel, zslabel))
        subhalos.to_hdf(fname, key='subhalos', mode='w')
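# Hedged sketch (an assumption, not the project's load.select_particles): the
# 'sphere' selection used repeatedly above presumably reduces to a radius cut
# around the lens centre, along these lines:
import numpy as np

def select_particles_sphere(pos, centre, radius):
    """Return indices of particles within `radius` of `centre` (same units)."""
    dist = np.linalg.norm(pos - centre, axis=1)
    return np.where(dist <= radius)[0]

# e.g. indx = select_particles_sphere(stars['Pos'], lens['Pos'],
#                                     lens['Rstellarhalfmass'] * 1.5)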
Exemplo n.º 17
0
def lensing_signal():
    # Get command line arguments
    args = {}
    #args["snapnum"]      = int(sys.argv[1])
    args["simdir"] = sys.argv[1]
    args["ladir"] = sys.argv[2]
    args["rksdir"] = sys.argv[3]
    args["outbase"] = sys.argv[4]
    args["radius"] = sys.argv[5]

    snapfile = args["simdir"] + 'snapdir_%03d/snap_%03d'

    # Units of Simulation
    scale = rf.simulation_units(args["simdir"])

    # Cosmological Parameters
    s = read_hdf5.snapshot(45, args["simdir"])
    cosmo = LambdaCDM(H0=s.header.hubble * 100,
                      Om0=s.header.omega_m,
                      Ode0=s.header.omega_l)
    h = s.header.hubble

    Lens = {
        "HF_ID": [],
        "LC_ID": [],
        "SrcID": [],
        "n_imgs": [],
        "M200": [],
        "zl": [],
        "zs": [],
        "Mdyn_rks": [],
        "Mdyn_stellar": [],
        "Mlens": [],
        "Vrms_stellar": [],
        "Vrms_rks": [],
        "Rein": []
    }
    label = args["simdir"].split('/')[-2].split('_')[-2]
    # Run through LensingMap output files
    for lm_file in glob.glob(args["ladir"] + 'LM_' + label + ".pickle"):
        # Load LensingMap Contents
        LM = pickle.load(open(lm_file, 'rb'))
        print('Processing the following file: \n %s' % (lm_file))
        print('which contains %d lenses' % len(LM['HF_ID'][:]))
        print('with redshifts from %f to %f' % \
                (np.min(LM['zl'][:]), np.max(LM['zl'][:])))
        # Ensure the lens list is ordered by ascending snapshot number
        if False not in (np.diff(LM['snapnum'][:]) >= 0):
            pass
        else:
            order = np.asarray(np.argsort(LM['snapnum'][:]))
            LM['HF_ID'] = [LM['HF_ID'][oo] for oo in order]
            LM['zl'] = [LM['zl'][oo] for oo in order]
            LM['Sources']['Src_ID'] = [
                LM['Sources']['Src_ID'][oo] for oo in order
            ]
            LM['Sources']['zs'] = [LM['Sources']['zs'][oo] for oo in order]
            LM['Sources']['mu'] = [LM['Sources']['mu'][oo] for oo in order]
            LM['Sources']['Rein'] = [LM['Sources']['Rein'][oo] for oo in order]
            LM['snapnum'] = [LM['snapnum'][oo] for oo in order]
            assert False not in (np.diff(LM['snapnum'][:]) >= 0)

        #TODO: sanity check remove after it works
        #lcsndir = '/cosma5/data/dp004/dc-beck3/StrongLensing/LightCone/full_physics/Rockstar/LC_SN_L62_N512_GR_kpc_1.h5'
        #lcsnf = h5py.File(lcsndir, 'r')
        #lcsndf1 = pd.DataFrame({'HF_ID' : lcsnf['Halo_Rockstar_ID'],
        #                       'LC_ID' : lcsnf['Halo_ID'],
        #                       'snapnum' : lcsnf['snapnum'],
        #                       'zl' : lcsnf['Halo_z'],
        #                       'Rvir' : lcsnf['Rvir']})
        #lcsndir = '/cosma5/data/dp004/dc-beck3/StrongLensing/LightCone/full_physics/Rockstar/LC_SN_L62_N512_GR_kpc_2.h5'
        #lcsnf = h5py.File(lcsndir, 'r')
        #lcsndf2 = pd.DataFrame({'HF_ID' : lcsnf['Halo_Rockstar_ID'],
        #                       'LC_ID' : lcsnf['Halo_ID'],
        #                       'snapnum' : lcsnf['snapnum'],
        #                       'zl' : lcsnf['Halo_z'],
        #                       'Rvir' : lcsnf['Rvir']})
        #lcsndf = pd.concat([lcsndf1, lcsndf2])
        previous_snapnum = -1
        # Run through lenses
        for ll in range(len(LM['HF_ID'])):
            # Load Lens properties
            HFID = int(LM['HF_ID'][ll])  #int(LM['Rockstar_ID'][ll])
            snapnum = int(LM['snapnum'][ll])
            zl = LM['zl'][ll]

            # Only load new particle data if lens is at another snapshot
            if (previous_snapnum != snapnum):
                print('::::: Load snapshot: %s' % \
                        (args["rksdir"]+'halos_%d.dat' % snapnum))
                hdata = pd.read_csv(args["rksdir"] + 'halos_%d.dat' % snapnum,
                                    sep='\s+',
                                    skiprows=np.arange(1, 16))
                # Load Particle Properties
                snap = snapfile % (snapnum, snapnum)
                s = read_hdf5.snapshot(snapnum, args["simdir"])
                s.read([
                    "Coordinates", "Masses", "Velocities",
                    "GFM_StellarFormationTime"
                ],
                       parttype=[4])
                age = (s.data['GFM_StellarFormationTime']['stars']
                       ).astype('float64')
                star_pos = s.data['Coordinates']['stars'][age >= 0, :] * scale
                star_mass = s.data['Masses']['stars'][age >= 0]
                star_vel = s.data['Velocities']['stars'][age >= 0, :]
            previous_snapnum = snapnum

            # Load Halo Properties
            try:
                subhalo = hdata.loc[hdata['#ID'] == HFID]
            except KeyError:
                continue
            if subhalo.empty:  # halo-finder ID not in this snapshot's catalogue
                continue
            HPos = subhalo[['X', 'Y', 'Z']].values[0]
            Vrms = subhalo['Vrms'].values[0]  #[km/s]
            M200 = subhalo['Mvir'].values[0]  #[Msun/h]
            hvel = subhalo[['VX', 'VY', 'VZ']].values[0]
            epva = subhalo[['A[x]', 'A[y]', 'A[z]']].values[0]
            epvb = subhalo[['B[x]', 'B[y]', 'B[z]']].values[0]
            epvc = subhalo[['C[x]', 'C[y]', 'C[z]']].values[0]

            ####----> Add keys <----####
            if args["radius"] == 'Rshm':
                #Rhalfmass = hdata['Halfmass_Radius'][indx]*u.kpc
                # Stellar Half Mass Radius
                Rad_dyn = cf.call_stellar_halfmass(
                    star_pos[:, 0], star_pos[:, 1], star_pos[:, 2], HPos[0],
                    HPos[1], HPos[2], star_mass, Rvir.to_value('Mpc')) * u.Mpc
                if Rshm == 0.0:
                    continue

                ## Stellar Half Light Radius
                ### https://arxiv.org/pdf/1804.04492.pdf $3.3
            else:
                Rad_dyn = subhalo['Rvir'].values[0] * u.kpc

            ## Dynamical Mass
            star_indx = lppf.check_in_sphere(HPos, star_pos,
                                             Rad_dyn.to_value('kpc'))
            if len(star_indx[0]) > 100:
                slices = np.vstack(
                    (epva / np.linalg.norm(epva), epvb / np.linalg.norm(epvb),
                     epvc / np.linalg.norm(epvc)))
                mdyn_s, mdyn_r, vrms_s = lppf.mass_dynamical(
                    Rad_dyn, star_vel[star_indx], HPos, hvel, slices, Vrms)
            else:
                print('!!! Not enough particles for Mdyn')
                continue
            if np.isnan(mdyn_s):
                print('!!! Mdyn = NaN')
                continue

            ## Lensing Mass
            # Run through sources
            for ss in range(len(LM['Sources']['Src_ID'][ll])):
                n_imgs = len(LM['Sources']['mu'][ll][ss])
                if n_imgs == 1:
                    #    print('!!! number of lensing images = 1, ', n_imgs)
                    continue
                zs = LM['Sources']['zs'][ll][ss]
                Rein_arc = LM['Sources']['Rein'][ll][ss] * u.arcsec
                Rein = Rein_arc.to_value('rad') * \
                        cosmo.angular_diameter_distance(zl).to('kpc')
                Lens['n_imgs'].append(n_imgs)
                Lens['M200'].append(M200)
                Lens['Mlens'].append(lppf.mass_lensing(Rein, zl, zs, cosmo))
                Lens['Mdyn_rks'].append(mdyn_r)
                Lens['Mdyn_stellar'].append(mdyn_s)
                Lens["Vrms_stellar"].append(vrms_s.to_value('km/s'))
                Lens["Vrms_rks"].append(Vrms)
                Lens['Rein'].append(LM['Sources']['Rein'][ll][ss])
                Lens['HF_ID'].append(HFID)
                Lens['LC_ID'].append(LM['LC_ID'][ll])
                Lens['SrcID'].append(LM['Sources']['Src_ID'][ll][ss])
                Lens['zl'].append(zl)
                Lens['zs'].append(zs)
                print('Saved data of lens %d' % (ll))
    df = pd.DataFrame.from_dict(Lens)
    print('Saving %d lenses to .hdf5' % (len(df.index)))
    label = args["simdir"].split('/')[-2].split('_')[-2]
    fname = (args["outbase"] + 'LPP_%s_2.h5' % label)
    df.to_hdf(fname, key='df', mode='w')
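# Hedged sketch (an assumption, not the project's lppf.mass_lensing): the mass
# enclosed by the Einstein radius is conventionally Sigma_crit * pi * R_E**2,
# with Sigma_crit = c**2 * D_s / (4 * pi * G * D_l * D_ls). Illustrative only.
import numpy as np
import astropy.units as u
from astropy import constants as const
from astropy.cosmology import LambdaCDM

def mass_inside_einstein_radius(rein_kpc, zl, zs, cosmo):
    Dl = cosmo.angular_diameter_distance(zl)
    Ds = cosmo.angular_diameter_distance(zs)
    Dls = cosmo.angular_diameter_distance_z1z2(zl, zs)
    sigma_cr = (const.c**2 * Ds / (4 * np.pi * const.G * Dl * Dls)).to('Msun / kpc2')
    return (sigma_cr * np.pi * (rein_kpc * u.kpc)**2).to_value('Msun')

# cosmo = LambdaCDM(H0=67.74, Om0=0.3089, Ode0=0.6911)   # illustrative values
# mass_inside_einstein_radius(5.0, 0.5, 1.5, cosmo)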
Exemplo n.º 18
0
def create_density_maps():
    time_start = time.time()
    # Get command line arguments
    args = {}
    if comm_rank == 0:
        args["simdir"] = sys.argv[1]
        args["hfdir"] = sys.argv[2]
        args["lcdir"] = sys.argv[3]
        args["ncells"] = int(sys.argv[4])
        args["walltime"] = int(sys.argv[5])
        args["outbase"] = sys.argv[6]
    args = comm.bcast(args, root=0)
    label = args["simdir"].split('/')[-2].split('_')[2]
    hflabel = whichhalofinder(args["lcdir"])

    # Load LightCone Contents
    if comm_rank == 0:
        lchdf = h5py.File(args["lcdir"], 'r')
        dfhalo = pd.DataFrame({
            'HF_ID': lchdf['Halo_Rockstar_ID'].value,
            'ID': lchdf['Halo_ID'].value,
            'Halo_z': lchdf['Halo_z'].value,
            'snapnum': lchdf['snapnum'].value,
            'Vrms': lchdf['VelDisp'].value,
            'fov_Mpc': lchdf['FOV'][:][1],
            ('HaloPosBox', 'X'): lchdf['HaloPosBox'][:, 0],
            ('HaloPosBox', 'Y'): lchdf['HaloPosBox'][:, 1],
            ('HaloPosBox', 'Z'): lchdf['HaloPosBox'][:, 2]
        })
        nhalo_per_snapshot = dfhalo.groupby('snapnum').count()['HF_ID']
        snapshots = dfhalo.groupby('snapnum').count().index.values
        dfhalo = dfhalo.sort_values(by=['snapnum'])
    else:
        nhalo_per_snapshot = None
    nhalo_per_snapshot = comm.bcast(nhalo_per_snapshot, root=0)

    sigma_tot = []
    out_hfid = []
    out_lcid = []
    out_redshift = []
    out_snapshot = []
    out_vrms = []
    out_fov = []
    ## Run over Snapshots
    for ss in range(len(nhalo_per_snapshot))[-2:]:  # only the last two snapshots
        print('Snapshot %d of %d' % (ss, len(nhalo_per_snapshot)))

        if comm_rank == 0:
            dfhalosnap = dfhalo.loc[dfhalo['snapnum'] == snapshots[ss]]
            # Load simulation
            s = read_hdf5.snapshot(snapshots[ss], args["simdir"])
            s.read(["Coordinates", "Masses", "GFM_StellarFormationTime"],
                   parttype=[0, 1, 4, 5])
            scale = 1e-3 * s.header.hubble
            cosmo = LambdaCDM(H0=s.header.hubble * 100,
                              Om0=s.header.omega_m,
                              Ode0=s.header.omega_l)
            print(': Redshift: %f' % s.header.redshift)

            sh_hfid = dfhalosnap['HF_ID'].values
            sh_id = dfhalosnap['ID'].values
            sh_red = dfhalosnap['Halo_z'].values
            sh_snap = dfhalosnap['snapnum'].values
            sh_vrms = dfhalosnap['Vrms'].values
            sh_fov = dfhalosnap['fov_Mpc'].values
            sh_x = dfhalosnap[('HaloPosBox', 'X')].values
            sh_y = dfhalosnap[('HaloPosBox', 'Y')].values
            sh_z = dfhalosnap[('HaloPosBox', 'Z')].values
            hist_edges = procdiv.histedges_equalN(sh_x, comm_size)
            SH = procdiv.cluster_subhalos_lc(sh_hfid, sh_id, sh_red, sh_snap,
                                             sh_vrms, sh_fov, sh_x, sh_y, sh_z,
                                             hist_edges, comm_size)

            ## Dark Matter
            DM = {
                'Mass': (s.data['Masses']['dm']).astype('float64'),
                'Pos': (s.data['Coordinates']['dm'] * scale).astype('float64')
            }
            ## Gas
            Gas = {
                'Mass': (s.data['Masses']['gas']).astype('float64'),
                'Pos': (s.data['Coordinates']['gas'] * scale).astype('float64')
            }
            ## Stars
            age = (
                s.data['GFM_StellarFormationTime']['stars']).astype('float64')
            Star = {
                'Mass':
                (s.data['Masses']['stars'][age >= 0]).astype('float64'),
                'Pos': (s.data['Coordinates']['stars'][age >= 0, :] *
                        scale).astype('float64')
            }
            ## BH
            BH = {
                'Mass': (s.data['Masses']['bh']).astype('float64'),
                'Pos': (s.data['Coordinates']['bh'] * scale).astype('float64')
            }

            # Calculate overlap for particle cuboids
            c = (const.c).to_value('km/s')
            fov_rad = 4 * np.pi * (np.percentile(SH['Vrms'], 90) / c)**2
            sh_dist = (cosmo.comoving_distance(
                s.header.redshift)).to_value('Mpc')
            alpha = 6  # multiplied by 4 because of Oguri&Marshall
            overlap = 0.5 * alpha * fov_rad * sh_dist  #[Mpc] half of field-of-view
            print('Cuboids overlap is: %f [Mpc]' % overlap)

            DM = procdiv.cluster_particles(DM, hist_edges, comm_size)
            Gas = procdiv.cluster_particles(Gas, hist_edges, comm_size)
            Star = procdiv.cluster_particles(Star, hist_edges, comm_size)
            BH = procdiv.cluster_particles(BH, hist_edges, comm_size)
        else:
            overlap = None
            hist_edges = None
            SH = {
                'HF_ID': None,
                'ID': None,
                'redshift': None,
                'snapshot': None,
                'Vrms': None,
                'fov_Mpc': None,
                'X': None,
                'Y': None,
                'Z': None,
                'split_size_1d': None,
                'split_disp_1d': None
            }
            DM = {
                'Mass': None,
                'X': None,
                'Y': None,
                'Z': None,
                'split_size_1d': None,
                'split_disp_1d': None
            }
            Gas = {
                'Mass': None,
                'X': None,
                'Y': None,
                'Z': None,
                'split_size_1d': None,
                'split_disp_1d': None
            }
            Star = {
                'Mass': None,
                'X': None,
                'Y': None,
                'Z': None,
                'split_size_1d': None,
                'split_disp_1d': None
            }
            BH = {
                'Mass': None,
                'X': None,
                'Y': None,
                'Z': None,
                'split_size_1d': None,
                'split_disp_1d': None
            }
        # Broadcast variables over all processors
        overlap = comm.bcast(overlap, root=0)
        hist_edges = comm.bcast(hist_edges, root=0)
        sh_split_size_1d = comm.bcast(SH['split_size_1d'], root=0)
        dm_split_size_1d = comm.bcast(DM['split_size_1d'], root=0)
        gas_split_size_1d = comm.bcast(Gas['split_size_1d'], root=0)
        star_split_size_1d = comm.bcast(Star['split_size_1d'], root=0)
        bh_split_size_1d = comm.bcast(BH['split_size_1d'], root=0)

        SH = procdiv.scatter_subhalos_lc(SH,
                                         sh_split_size_1d,
                                         comm_rank,
                                         comm,
                                         root_proc=0)
        DM = procdiv.scatter_particles(DM,
                                       dm_split_size_1d,
                                       comm_rank,
                                       comm,
                                       root_proc=0)
        Gas = procdiv.scatter_particles(Gas,
                                        gas_split_size_1d,
                                        comm_rank,
                                        comm,
                                        root_proc=0)
        Star = procdiv.scatter_particles(Star,
                                         star_split_size_1d,
                                         comm_rank,
                                         comm,
                                         root_proc=0)
        BH = procdiv.scatter_particles(BH,
                                       bh_split_size_1d,
                                       comm_rank,
                                       comm,
                                       root_proc=0)

        print(
            ': Proc. %d got: \n\t %d Sub-&Halos \n\t %d dark matter \n\t %d gas \n\t %d stars \n'
            % (comm_rank, int(
                sh_split_size_1d[comm_rank]), int(dm_split_size_1d[comm_rank]),
               int(gas_split_size_1d[comm_rank]),
               int(star_split_size_1d[comm_rank])))

        ## Run over Sub-&Halos
        for ll in range(len(SH['ID'])):
            print('Lens %d' % (ll))
            #TODO: for z=0 sh_dist=0!!!

            smlpixel = 20  # maximum smoothing pixel length
            ## BH
            pos, indx = dmaps.select_particles(
                BH['Pos'],
                SH['Pos'][ll],  #*a/h,
                SH['fov_Mpc'][ll],
                'box')
            bh_sigma = dmaps.projected_density_pmesh_adaptive(
                pos,
                BH['Mass'][indx],
                SH['fov_Mpc'][ll],
                args["ncells"],
                hmax=smlpixel)
            ## Star
            pos, indx = dmaps.select_particles(
                Star['Pos'],
                SH['Pos'][ll],  #*a/h,
                SH['fov_Mpc'][ll],
                'box')
            star_sigma = dmaps.projected_density_pmesh_adaptive(
                pos,
                Star['Mass'][indx],
                SH['fov_Mpc'][ll],
                args["ncells"],
                hmax=smlpixel)
            ## Gas
            pos, indx = dmaps.select_particles(
                Gas['Pos'],
                SH['Pos'][ll],  #*a/h
                SH['fov_Mpc'][ll],
                'box')
            gas_sigma = dmaps.projected_density_pmesh_adaptive(
                pos,
                Gas['Mass'][indx],
                SH['fov_Mpc'][ll],
                args["ncells"],
                hmax=smlpixel)
            ## DM
            pos, indx = dmaps.select_particles(
                DM['Pos'],
                SH['Pos'][ll],  #*a/h
                SH['fov_Mpc'][ll],
                'box')
            dm_sigma = dmaps.projected_density_pmesh_adaptive(
                pos,
                DM['Mass'][indx],
                SH['fov_Mpc'][ll],  #[Mpc]
                args["ncells"],
                hmax=smlpixel)
            sigmatotal = dm_sigma + gas_sigma + star_sigma + bh_sigma

            # Make sure that the density map is filled
            while 0.0 in sigmatotal:
                smlpixel += 5
                dm_sigma = dmaps.projected_density_pmesh_adaptive(
                    pos,
                    DM['Mass'][indx],
                    SH['fov_Mpc'][ll],  #[Mpc]
                    args["ncells"],
                    hmax=smlpixel)
                sigmatotal = dm_sigma + gas_sigma + star_sigma + bh_sigma

            #tmap.plotting(sigmatotal, args["ncells"],
            #              SH['fov_Mpc'][ll], SH['redshift'][ll])
            sigma_tot.append(sigmatotal)
            out_hfid.append(SH['HF_ID'][ll])
            out_lcid.append(SH['ID'][ll])
            out_redshift.append(SH['redshift'][ll])
            out_snapshot.append(SH['snapshot'][ll])
            out_vrms.append(SH['Vrms'][ll])
            out_fov.append(SH['fov_Mpc'][ll])
            if args["walltime"] - (time_start - time.time()) / (60 *
                                                                60) < 0.25:
                fname = args["outbase"] + 'DM_' + label + '_lc.h5'
                hf = h5py.File(fname, 'w')
                hf.create_dataset('density_map', data=sigma_tot)
                hf.create_dataset('HF_ID', data=np.asarray(out_hfid))
                hf.create_dataset('LC_ID', data=np.asarray(out_lcid))
                hf.create_dataset('redshift', data=np.asarray(out_redshift))
                hf.create_dataset('snapshot', data=np.asarray(out_snapshot))
                hf.create_dataset('Vrms', data=np.asarray(out_vrms))
                hf.create_dataset('fov_Mpc', data=np.asarray(out_fov))
                hf.close()

    # Gather the Results
    #comm.Barrier()
    #comm.Gather(out_hfid, [rootout_hfid,split_sizes,displacements,MPI.DOUBLE], root=0)

    fname = args["outbase"] + 'DM_' + label + '_lc.h5'
    hf = h5py.File(fname, 'w')
    hf.create_dataset('density_map', data=sigma_tot)
    hf.create_dataset('HF_ID', data=np.asarray(out_hfid))
    hf.create_dataset('LC_ID', data=np.asarray(out_lcid))
    hf.create_dataset('redshift', data=np.asarray(out_redshift))
    hf.create_dataset('snapshot', data=np.asarray(out_snapshot))
    hf.create_dataset('Vrms', data=np.asarray(out_vrms))
    hf.create_dataset('fov_Mpc', data=np.asarray(out_fov))
    #RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility. Expected 96, got 88
    hf.close()
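# Hedged sketch (an assumption, not the project's procdiv.histedges_equalN):
# the per-process domain split above relies on x-axis bin edges chosen so that
# every slab holds roughly the same number of halos; the standard recipe is:
import numpy as np

def histedges_equal_count(x, nbin):
    npt = len(x)
    return np.interp(np.linspace(0, npt, nbin + 1), np.arange(npt), np.sort(x))

# edges = histedges_equal_count(np.random.uniform(0, 62, 5000), 4)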
Exemplo n.º 19
0
    def __init__(self, hfdir, snapdir, snapnum, halo_finder):
        """
        Load Subhalo properties of snapshots into Dictionary
        Use:
            box = Lightcone(hfdir, snapdir, snapnum, halo_finder)

        Input:
            hfdir: halo-finder directory
            snapdir: snapshot directory
            snapnum: snapshot number
            halo_finder: Subfind, Rockstar or AHF
        
        Output:
            prop_box: dictionary with subhalo properties
            boxlength: side length of simulated box in units used in simulation
                       (e.g. Mpc/h or kpc/h)
        """
        if halo_finder == 'Subfind':
            blockprint()
            s = read_hdf5.snapshot(snapnum, hfdir)
            s.group_catalog(["SubhaloIDMostbound", "SubhaloPos", "SubhaloVel",
                             "SubhaloMass", "SubhaloVmax", "SubhaloVelDisp",
                             "SubhaloVmaxRad", "SubhaloHalfmassRad"])
            self.unitlength = s.header.unitlength
            self.boxsize = s.header.boxsize
            enableprint()
            indx = np.where(s.cat["SubhaloMass"] > 10**11)[0]
            #sub_id = self.subhalo_selection(pos, data['Mvir'], data['Vrms'], 0, 0)
            prop_box = {'snapnum' : np.ones(len(s.cat['SubhaloIDMostbound'][indx])) * \
                                    snapnum,
                        'ID' : s.cat['SubhaloIDMostbound'][indx],
                        'pos' : s.cat['SubhaloPos'][indx, :],
                        'pos_b' : s.cat['SubhaloPos'][indx, :],
                        'vel_b' : s.cat['SubhaloVel'][indx, :],
                        'Mvir_b' : s.cat['SubhaloMass'][indx],
                        'velmax_b' : s.cat['SubhaloVmax'][indx],
                        'veldisp_b' : s.cat['SubhaloVelDisp'][indx],
                        'rvmax_b' : s.cat['SubhaloVmaxRad'][indx],
                        'rhalfmass_b' : s.cat['SubhaloHalfmassRad'][indx]}
            self.prop = prop_box
        elif halo_finder == 'Rockstar':
            hf_dir = hfdir + 'halos_%d.dat' % snapnum
            data = pd.read_csv(hf_dir, sep='\s+', skiprows=np.arange(1, 16))
            #if LengthUnit == 'kpc':
            #    pos = pd.concat([data['X']*1e-3, data['Y']*1e-3, data['Z']*1e-3], axis=1)
            #else:
            #    pos = pd.concat([data['X'], data['Y'], data['Z']], axis=1)
            self.boxsize = 62  #[Mpc]
            # positions assumed to be in [Mpc] here; see the commented unit switch above
            pos = pd.concat([data['X'], data['Y'], data['Z']], axis=1)
            vel = pd.concat([data['VX'], data['VY'], data['VZ']], axis=1)
            sub_id = self.subhalo_selection(pos, data['Mvir'], data['Vrms'], 0, 0)
            prop_box = {'snapnum' : snapnum*np.ones(len(sub_id[0])),
                        'ID' : data['#ID'].values[sub_id][0],
                        'pos' : pos.values[sub_id][0],
                        'pos_b' : pos.values[sub_id][0],
                        'vel_b' : vel.values[sub_id][0],
                        'Mvir_b' : data['Mvir'].values[sub_id][0],
                        'M200b_b' : data['M200b'].values[sub_id][0],
                        'velmax_b' : data['Vmax'].values[sub_id][0],
                        'veldisp_b' : data['Vrms'].values[sub_id][0],
                        'rvir_b' : data['Rvir'].values[sub_id][0],
                        'rs_b' : data['Rs'].values[sub_id][0],
                        'rvmax_b' : data['Rvmax'].values[sub_id][0],
                        'Halfmass_Radius' : data['Halfmass_Radius'].values[sub_id][0]}
            self.prop = prop_box
        elif halo_finder == 'AHF':
            pass
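# Hedged sketch (an assumption, not the class's subhalo_selection): a minimal
# selection that mirrors the Subfind branch above, i.e. an index cut on halo
# mass and, optionally, velocity dispersion:
import numpy as np

def simple_subhalo_selection(mvir, vrms, mmin=1e11, vmin=0.0):
    mvir, vrms = np.asarray(mvir), np.asarray(vrms)
    return np.where((mvir > mmin) & (vrms > vmin))[0]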
Exemplo n.º 20
0
def create_density_maps():
    time_start = time.time()
    # Get command line arguments
    args = {}
    args["simdir"] = sys.argv[1]
    args["lcdir"] = sys.argv[2]
    args["ncells"] = int(sys.argv[3])
    args["sml"] = int(sys.argv[4])
    args["walltime"] = int(sys.argv[5])
    args["outbase"] = sys.argv[6]
    label = args["simdir"].split('/')[-2].split('_')[2]
    lclabel = args["lcdir"].split('/')[-1][-4]
    # Characteristics
    hflabel = whichhalofinder(args["lcdir"])

    # Load LightCone Contents
    lchdf = h5py.File(args["lcdir"], 'r')
    dfhalo = pd.DataFrame({
        'HF_ID': lchdf['HF_ID'].value,
        'LC_ID': lchdf['LC_ID'].value,
        'Halo_z': lchdf['Halo_z'].value,
        'snapnum': lchdf['snapnum'].value,
        'Vrms': lchdf['VelDisp'].value,
        'fov_Mpc': lchdf['FOV'][:][1],
        ('HaloPosBox', 'X'): lchdf['HaloPosBox'][:, 0],
        ('HaloPosBox', 'Y'): lchdf['HaloPosBox'][:, 1],
        ('HaloPosBox', 'Z'): lchdf['HaloPosBox'][:, 2]
    })

    if len(dfhalo.index.values) > 2000:
        # Limit number of halos, to keep comp. cost down
        dfhalo = dfhalo.sample(n=2000)
    print('There are %d galaxies in this lightcone' % len(dfhalo.index.values))

    nhalo_per_snapshot = dfhalo.groupby('snapnum').count()['HF_ID']
    print('divided over the lightcone as:')
    print(nhalo_per_snapshot)
    snapshots = dfhalo.groupby('snapnum').count().index.values
    dfhalo = dfhalo.sort_values(by=['snapnum'])

    sigma_tot = []
    out_hfid = []
    out_lcid = []
    out_redshift = []
    out_snapnum = []
    out_vrms = []
    out_fov = []

    ## Run over Snapshots
    for ss in range(len(nhalo_per_snapshot)):
        print('Snapshot %d of %d' % (ss, len(nhalo_per_snapshot)))
        dfhalosnap = dfhalo.loc[dfhalo['snapnum'] == snapshots[ss]]

        # Load simulation
        s = read_hdf5.snapshot(snapshots[ss], args["simdir"])
        s.read(["Coordinates", "Masses", "GFM_StellarFormationTime"],
               parttype=[0, 1, 4, 5])
        scale = 1e-3 * s.header.hubble
        print(': Redshift: %f' % s.header.redshift)

        DM, Gas, Star, BH = particle_data(s.data, s.header.hubble, 'kpc')
        ## Run over Sub-&Halos
        for ll in range(len(dfhalosnap.index)):
            print('Lens %d of %d' % (ll, len(dfhalosnap.index)))
            #TODO: for z=0 sh_dist=0!!!

            # Define Cosmology
            cosmo = LambdaCDM(H0=s.header.hubble * 100,
                              Om0=s.header.omega_m,
                              Ode0=s.header.omega_l)
            cosmosim = {
                'omega_M_0': s.header.omega_m,
                'omega_lambda_0': s.header.omega_l,
                'omega_k_0': 0.0,
                'h': s.header.hubble
            }

            smlpixel = args["sml"]  # maximum smoothing pixel length
            shpos = dfhalosnap.filter(regex='HaloPosBox').iloc[ll].values
            #time_start = time.time()
            ## BH
            pos, indx = dmaps.select_particles(
                BH['Pos'],
                shpos,  #*a/h,
                dfhalosnap['fov_Mpc'].values[ll],
                'box')
            bh_sigma = dmaps.projected_density_pmesh(
                pos, BH['Mass'][indx], dfhalosnap['fov_Mpc'].values[ll],
                args["ncells"])
            ## Gas
            pos, indx = dmaps.select_particles(
                Gas['Pos'],
                shpos,  #*a/h,
                dfhalosnap['fov_Mpc'].values[ll],
                'box')
            gas_sigma = dmaps.projected_density_pmesh_adaptive(
                pos,
                Gas['Mass'][indx],
                dfhalosnap['fov_Mpc'].values[ll],
                args["ncells"],
                hmax=smlpixel)
            ## Star
            pos, indx = dmaps.select_particles(
                Star['Pos'],
                shpos,  #*a/h
                dfhalosnap['fov_Mpc'].values[ll],
                'box')
            star_sigma = dmaps.projected_density_pmesh_adaptive(
                pos,
                Star['Mass'][indx],
                dfhalosnap['fov_Mpc'].values[ll],
                args["ncells"],
                hmax=smlpixel)
            ## DM
            pos, indx = dmaps.select_particles(
                DM['Pos'],
                shpos,  #*a/h
                dfhalosnap['fov_Mpc'].values[ll],
                'box')
            dm_sigma = dmaps.projected_density_pmesh_adaptive(
                pos,
                DM['Mass'][indx],
                dfhalosnap['fov_Mpc'].values[ll],  #[Mpc]
                args["ncells"],
                hmax=smlpixel)
            sigmatotal = dm_sigma + gas_sigma + star_sigma + bh_sigma

            # Make sure that the density map is filled
            extention = 0
            while 0.0 in sigmatotal and (extention < 60):
                extention += 5
                dm_sigma = dmaps.projected_density_pmesh_adaptive(
                    pos,
                    DM['Mass'][indx],
                    dfhalosnap['fov_Mpc'].values[ll],  #[Mpc]
                    args["ncells"],
                    hmax=smlpixel + extention)
                sigmatotal = dm_sigma + gas_sigma + star_sigma + bh_sigma

            sigma_tot.append(sigmatotal)
            out_hfid.append(dfhalosnap['HF_ID'].values[ll])
            out_lcid.append(dfhalosnap['LC_ID'].values[ll])
            out_fov.append(dfhalosnap['fov_Mpc'].values[ll])
            if args["walltime"] - (time_start - time.time()) / (60 *
                                                                60) < 0.25:
                fname = args["outbase"] + 'DM_' + label + '_lc' + str(
                    lclabel) + '.h5'
                hf = h5py.File(fname, 'w')
                hf.create_dataset('density_map', data=sigma_tot)
                hf.create_dataset('HF_ID', data=np.asarray(out_hfid))
                hf.create_dataset('LC_ID', data=np.asarray(out_lcid))
                hf.create_dataset('fov_Mpc', data=np.asarray(out_fov))
                hf.close()

    fname = args["outbase"] + 'DM_' + label + '_lc_' + str(lclabel) + '.h5'
    hf = h5py.File(fname, 'w')
    hf.create_dataset('density_map', data=sigma_tot)
    hf.create_dataset('HF_ID', data=np.asarray(out_hfid))
    hf.create_dataset('LC_ID', data=np.asarray(out_lcid))
    hf.create_dataset('fov_Mpc', data=np.asarray(out_fov))
    #RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility. Expected 96, got 88
    hf.close()
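# Hedged aside (not part of the original script): the walltime guard above boils
# down to "dump intermediate results once less than ~15 minutes remain". A
# minimal helper expressing that check:
import time

def nearly_out_of_time(time_start, walltime_hours, safety_hours=0.25):
    elapsed_hours = (time.time() - time_start) / 3600.0
    return walltime_hours - elapsed_hours < safety_hours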
Exemplo n.º 21
0
args = {}
args["simdir"] = sys.argv[1]
args["outdir"] = sys.argv[2]
args["simtype"] = sys.argv[3]
args["nsnap"] = sys.argv[4].split(" ")
args["num_child_voxel"] = int(sys.argv[5])
args["Lbox"] = float(sys.argv[6])
memory_cutoff = 1e8


# Run through snapshots
for ii in range(len(args["nsnap"])):
    args["nsnap"][ii] = int(args["nsnap"][ii])
    s = read_hdf5.snapshot(
        args["nsnap"][ii], args["simdir"], check_total_particle_number=True
    )
    bname = args["outdir"] + "%s_s%d_v%d" % (
        args["simtype"],
        args["nsnap"][ii],
        args["num_child_voxel"],
    )
    args["Lbox"] = args["Lbox"] * 1e3 / s.header.hubble

    # Read subhalos
    #s.group_catalog(["SubhaloPos"])
    #pos = dif.voxeling(s.cat["SubhaloPos"], args)
    #pos = pd.DataFrame(pos, dtype=np.int32)
    #pos.columns = ["x", "y", "z"]
    #pos = dif.counting(pos)
    #arr = dif.structuring(pos, args)
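# Hedged sketch (an assumption, not the project's dif.voxeling/counting): binning
# positions into a cubic grid of num_child_voxel cells per side with
# np.histogramdd is the kind of voxel occupancy the commented lines hint at.
import numpy as np

def voxel_counts(pos, lbox, nvox):
    edges = np.linspace(0.0, lbox, nvox + 1)
    counts, _ = np.histogramdd(pos, bins=(edges, edges, edges))
    return counts  # occupancy grid of shape (nvox, nvox, nvox)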
Exemplo n.º 22
0
    def __init__(self, simulation, snapnum):
        '''
		Class to read halo catalogs from simulation

		simulation: name of the simulation
		snapnum: snapshot number to read

		'''

        # Read snapshot
        self.simulation = simulation

        #h5_dir = '/cosma6/data/dp004/dc-arno1/SZ_project/full_physics/L62_N512_%s_kpc/'%self.simulation
        h5_dir = '/cosma5/data/dp004/dc-cues1/TNG300-1/'
        self.snapnum = snapnum
        self.snapshot = read_hdf5.snapshot(snapnum, h5_dir)

        self.boxsize = self.snapshot.header.boxsize  #kpc

        # Useful definitions
        self.dm = 1
        self.stars = 4
        self.dm_particle_mass = self.snapshot.header.massarr[self.dm] * 1.e10
        '''
		param_file = '/cosma6/data/dp004/dc-arno1/SZ_project/full_physics/L62_N512_GR_kpc/parameters-usedvalues'

		with open(param_file) as search:
			for line in search:
				line = line.rstrip()  # remove '\n' at end of line
				if line.split()[0] == 'OmegaBaryon':
					omega_b = float(line.split()[-1])
		self.mean_density = (self.snapshot.header.omega_m - omega_b) * self.snapshot.const.rho_crit
		'''

        self.stellar_mass_thresh = 1.e9
        #self.halo_mass_thresh = 6.e9
        self.halo_mass_thresh = self.dm_particle_mass * 100.  # at least 100 particles
        print('Minimum stellar mass : %.2E' % self.stellar_mass_thresh)
        print('Minimum DM halo mass : %.2E' % self.halo_mass_thresh)

        # Load fields that will be used
        useful_properties = ['GroupMass', 'Group_M_Crit200', 'Group_R_Crit200',\
          'GroupMassType', 'GroupNsubs', 'GroupLenType', 'GroupPos', 'GroupCM','SubhaloCM',\
          'SubhaloMassType','SubhaloMass', 'SubhaloVelDisp', 'SubhaloVmax','GroupFirstSub',\
          'SubhaloHalfmassRadType','GroupPos', 'SubhaloVmaxRad', 'SubhaloSpin', 'SubhaloMassType']

        self.snapshot.group_catalog(useful_properties)

        # Get only resolved halos
        self.halo_mass_cut = self.snapshot.cat[
            'Group_M_Crit200'][:] > self.halo_mass_thresh

        self.N_subhalos = (self.snapshot.cat['GroupNsubs']).astype(np.int64)
        self.N_particles = (self.snapshot.cat['GroupLenType'][:,
                                                              self.dm]).astype(
                                                                  np.int64)

        self.group_offset = (np.cumsum(self.N_particles) -
                             self.N_particles).astype(np.int64)
        self.group_offset = self.group_offset[self.halo_mass_cut]

        self.subhalo_offset = (np.cumsum(self.N_subhalos) -
                               self.N_subhalos).astype(np.int64)
        self.subhalo_offset = self.subhalo_offset[self.halo_mass_cut]

        self.N_subhalos = self.N_subhalos[self.halo_mass_cut]
        self.N_particles = self.N_particles[self.halo_mass_cut]

        self.N_halos = self.N_subhalos.shape[0]

        self.N_gals, self.M_stars = self.Number_of_galaxies()

        self.logM_stars = np.log10(self.M_stars)

        self.load_inmidiate_features()

        print('%d resolved halos found.' % self.N_halos)
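# Hedged aside (not part of the original class): the cumsum-minus-count pattern
# above yields, for every group, the index of its first particle (or subhalo) in
# the snapshot-ordered arrays, so a halo's members can be sliced directly.
# Illustrative names:
import numpy as np

n_particles = np.array([5, 3, 7])                     # particles per group
group_offset = np.cumsum(n_particles) - n_particles   # -> [0, 5, 8]
# particles_of_group_i = coords[group_offset[i]:group_offset[i] + n_particles[i]]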
Exemplo n.º 23
0
def create_density_maps():
    # Get command line arguments
    args = {}
    if comm_rank == 0:
        print(':Registered %d processes' % comm_size)
        args["simdir"] = sys.argv[1]
        args["hfdir"] = sys.argv[2]
        args["snapnum"] = int(sys.argv[3])
        args["zs"] = float(sys.argv[4]) / 10
    args = comm.bcast(args)
    label = args["simdir"].split('/')[-2].split('_')[2]

    # Organize division of Sub-&Halos over Processes on Proc. 0
    if comm_rank == 0:
        # Load simulation
        s = read_hdf5.snapshot(args["snapnum"], args["simdir"])
        s.read(["Coordinates", "Masses", "GFM_StellarFormationTime"],
               parttype=[0, 1, 4])  #[0,1,4]
        scale = 1e-3 * s.header.hubble

        # Define Cosmology
        cosmo = LambdaCDM(H0=s.header.hubble * 100,
                          Om0=s.header.omega_m,
                          Ode0=s.header.omega_l)
        cosmosim = {
            'omega_M_0': s.header.omega_m,
            'omega_lambda_0': s.header.omega_l,
            'omega_k_0': 0.0,
            'h': s.header.hubble
        }
        redshift = s.header.redshift
        print(': Redshift: %f' % redshift)

        # Sort Sub-&Halos over Processes
        df = pd.read_csv(args["hfdir"] + 'halos_%d.dat' % args["snapnum"],
                         sep='\s+',
                         skiprows=16,
                         usecols=[0, 2, 4, 9, 10, 11],
                         names=['ID', 'Mvir', 'Vrms', 'X', 'Y', 'Z'])
        df = df[df['Mvir'] > 5e11]
        sh_id = df['ID'].values.astype('float64')
        sh_vrms = df['Vrms'].values.astype('float64')
        sh_x = df['X'].values.astype('float64')
        sh_y = df['Y'].values.astype('float64')
        sh_z = df['Z'].values.astype('float64')
        del df
        hist_edges = procdiv.histedges_equalN(sh_x, comm_size)
        SH = procdiv.cluster_subhalos(sh_id, sh_vrms, sh_x, sh_y, sh_z,
                                      hist_edges, comm_size)

        # Calculate overlap for particle cuboids
        c = (const.c).to_value('km/s')
        fov_rad = 4 * np.pi * (np.percentile(SH['Vrms'], 90) / c)**2
        sh_dist = (cosmo.comoving_distance(redshift)).to_value('Mpc')
        alpha = 6  # multiplied by 4 because of Oguri&Marshall
        overlap = 0.5 * alpha * fov_rad * sh_dist  #[Mpc] half of field-of-view
        print('Cuboids overlap is: %f [Mpc]' % overlap)

        # Sort Particles over Processes
        ## Dark Matter
        DM = {
            'Mass': (s.data['Masses']['dm']).astype('float64'),
            'Pos': (s.data['Coordinates']['dm'] * scale).astype('float64')
        }
        DM = procdiv.cluster_particles(DM, hist_edges, comm_size)
        ## Gas
        Gas = {
            'Mass': (s.data['Masses']['gas']).astype('float64'),
            'Pos': (s.data['Coordinates']['gas'] * scale).astype('float64')
        }
        Gas = procdiv.cluster_particles(Gas, hist_edges, comm_size)
        ## Stars
        age = (s.data['GFM_StellarFormationTime']['stars']).astype('float64')
        Star = {
            'Mass': (s.data['Masses']['stars'][age >= 0]).astype('float64'),
            'Pos': (s.data['Coordinates']['stars'][age >= 0, :] *
                    scale).astype('float64')
        }
        del age
        Star = procdiv.cluster_particles(Star, hist_edges, comm_size)

    else:
        c = None
        alpha = None
        overlap = None
        cosmosim = None
        cosmo = None
        redshift = None
        hist_edges = None
        SH = {
            'ID': None,
            'Vrms': None,
            'X': None,
            'Y': None,
            'Z': None,
            'split_size_1d': None,
            'split_disp_1d': None
        }
        DM = {
            'Mass': None,
            'X': None,
            'Y': None,
            'Z': None,
            'split_size_1d': None,
            'split_disp_1d': None
        }
        Gas = {
            'Mass': None,
            'X': None,
            'Y': None,
            'Z': None,
            'split_size_1d': None,
            'split_disp_1d': None
        }
        Star = {
            'Mass': None,
            'X': None,
            'Y': None,
            'Z': None,
            'split_size_1d': None,
            'split_disp_1d': None
        }

    # Broadcast variables over all processors
    sh_split_size_1d = comm.bcast(SH['split_size_1d'], root=0)
    dm_split_size_1d = comm.bcast(DM['split_size_1d'], root=0)
    gas_split_size_1d = comm.bcast(Gas['split_size_1d'], root=0)
    star_split_size_1d = comm.bcast(Star['split_size_1d'], root=0)
    c = comm.bcast(c, root=0)
    alpha = comm.bcast(alpha, root=0)
    overlap = comm.bcast(overlap, root=0)
    cosmo = comm.bcast(cosmo, root=0)
    redshift = comm.bcast(redshift, root=0)
    hist_edges = comm.bcast(hist_edges, root=0)

    SH = procdiv.scatter_subhalos(SH,
                                  sh_split_size_1d,
                                  comm_rank,
                                  comm,
                                  root_proc=0)
    DM = procdiv.scatter_particles(DM,
                                   dm_split_size_1d,
                                   comm_rank,
                                   comm,
                                   root_proc=0)
    Gas = procdiv.scatter_particles(Gas,
                                    gas_split_size_1d,
                                    comm_rank,
                                    comm,
                                    root_proc=0)
    Star = procdiv.scatter_particles(Star,
                                     star_split_size_1d,
                                     comm_rank,
                                     comm,
                                     root_proc=0)
    print(
        ': Proc. %d got: \n\t %d Sub-&Halos \n\t %d dark matter \n\t %d gas \n\t %d stars \n'
        % (comm_rank, int(sh_split_size_1d[comm_rank]),
           int(dm_split_size_1d[comm_rank]), int(gas_split_size_1d[comm_rank]),
           int(star_split_size_1d[comm_rank])))

    ## Run over Sub-&Halos
    zl = redshift
    zs = args["zs"]
    ncells = [512, 256, 128]
    nparts = [1, 2, 4, 8]
    M200 = np.ones(len(SH['ID']))
    ID = np.ones(len(SH['ID']))
    Rein = np.ones((len(SH['ID']), len(ncells), len(nparts)))
    for ll in range(len(SH['ID'])):
        # Define field-of-view
        fov_rad = 4 * np.pi * (SH['Vrms'][ll] / c)**2
        sh_dist = (cosmo.comoving_distance(redshift)).to_value('Mpc')
        fov_Mpc = alpha * fov_rad * sh_dist  #[Mpc] is it the diameter?
        fov_arc = (fov_Mpc / cf.Da(zl, cosmo) * u.rad).to_value('arcsec')
        sigma_cr = sigma_crit(zl, zs, cosmo).to_value('Msun Mpc-2')

        # Check cuboid boundary condition,
        # that all surface densities are filled with particles
        if ((SH['Pos'][ll,0]-hist_edges[comm_rank] < overlap) or
                (hist_edges[comm_rank+1]-overlap < \
                 SH['Pos'][ll,0]-hist_edges[comm_rank])):
            if fov_Mpc * 0.45 > overlap:
                print("FOV is bigger than cuboids overlap: %f > %f" % \
                        (fov_Mpc*0.45, overlap))
                continue

        ## Run over different Ncells
        for cc in range(len(ncells)):
            dsx_arc = fov_arc / ncells[cc]  #[arcsec] pixel size

            ## Run over particle reductions
            for mm in range(len(nparts)):
                smlpixel = 20  # maximum smoothing pixel length
                pos, indx = dmap.select_particles(
                    Gas['Pos'],
                    SH['Pos'][ll],  #*a/h,
                    fov_Mpc,
                    'box')
                gas_sigma = dmap.projected_density_pmesh_adaptive(
                    pos[::nparts[mm], :],
                    Gas['Mass'][indx][::nparts[mm]],
                    fov_Mpc,
                    ncells[cc],
                    hmax=smlpixel)
                pos, indx = dmap.select_particles(
                    Star['Pos'],
                    SH['Pos'][ll],  #*a/h,
                    fov_Mpc,
                    'box')
                star_sigma = dmap.projected_density_pmesh_adaptive(
                    pos[::nparts[mm], :],
                    Star['Mass'][indx][::nparts[mm]],
                    fov_Mpc,
                    ncells[cc],
                    hmax=smlpixel)
                pos, indx = dmap.select_particles(
                    DM['Pos'],
                    SH['Pos'][ll],  #*a/h,
                    fov_Mpc,
                    'box')
                dm_sigma = dmap.projected_density_pmesh_adaptive(
                    pos[::nparts[mm], :],
                    DM['Mass'][indx][::nparts[mm]],
                    fov_Mpc,  #[Mpc]
                    ncells[cc],
                    hmax=smlpixel)
                tot_sigma = dm_sigma + gas_sigma + star_sigma

                # Make sure that density-map is filled
                while 0.0 in tot_sigma:
                    smlpixel += 5
                    dm_sigma = dmap.projected_density_pmesh_adaptive(
                        pos[::nparts[mm], :],
                        DM['Mass'][indx][::nparts[mm]],
                        fov_Mpc,  #[Mpc]
                        ncells[cc],
                        hmax=smlpixel)
                    tot_sigma = dm_sigma + gas_sigma + star_sigma
                #tmap.plotting(tot_sigma, ncells[cc], fov_Mpc, zl)

                # initialize the coordinates of grids (light rays on lens plan)
                lpv = np.linspace(-(fov_arc - dsx_arc) / 2,
                                  (fov_arc - dsx_arc) / 2, ncells[cc])
                lp1, lp2 = np.meshgrid(lpv, lpv)  #[arcsec]

                fig = plt.figure()
                ax = fig.add_subplot(111)
                # Calculate convergence map
                kappa = tot_sigma / sigma_cr

                # Calculate Deflection Maps
                alpha1, alpha2, mu_map, phi, detA, lambda_t = cal_lensing_signals(
                    kappa, fov_arc, ncells[cc])
                # Calculate Einstein Radii
                Rein[ll, cc, mm] = einstein_radii(lp1, lp2, detA, lambda_t, zl,
                                                  cosmo, ax, 'med', ll)
                #print('Rein = %f' % Rein[ll, cc, mm])
                ID[ll] = SH['ID'][ll]
                #plt.close(fig)
    output = {}
    for cc in range(len(ncells)):
        for mm in range(len(nparts)):
            output[(str(ncells[cc]), str(nparts[mm]))] = Rein[:, cc, mm]
    df = pd.DataFrame.from_dict(output)
    df['ID'] = ID
    #self.df = pd.concat([self.df, dfp], axis=1)
    fname = 'DMConvTest_' + label + '_' + str(comm_rank) + '_zs150.h5'
    df.to_hdf(fname, key='Rein', mode='w')
    plt.close(fig)
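# Hedged aside (not part of the original script): the 4*pi*(Vrms/c)**2 factor
# used above to size the field of view matches the singular-isothermal-sphere
# Einstein radius, theta_E = 4*pi*(sigma_v/c)**2 * D_ls/D_s. A minimal sketch of
# that relation (illustrative names and values):
import numpy as np
import astropy.units as u
from astropy.cosmology import LambdaCDM

def sis_einstein_radius_arcsec(sigma_v_kms, zl, zs, cosmo):
    Ds = cosmo.angular_diameter_distance(zs)
    Dls = cosmo.angular_diameter_distance_z1z2(zl, zs)
    c_kms = 299792.458  # speed of light [km/s]
    theta_rad = 4 * np.pi * (sigma_v_kms / c_kms)**2 * (Dls / Ds).value
    return (theta_rad * u.rad).to_value('arcsec')

# cosmo = LambdaCDM(H0=67.74, Om0=0.3089, Ode0=0.6911)   # illustrative values
# sis_einstein_radius_arcsec(250.0, 0.5, 1.5, cosmo)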
Exemplo n.º 24
0
def create_density_maps():
    # Get command line arguments
    args = {}
    if comm_rank == 0:
        print(':Registered %d processes' % comm_size)
        args["simdir"] = sys.argv[1]
        args["hfname"] = sys.argv[2]
        args["hfdir"] = sys.argv[3]
        args["snapnum"] = int(sys.argv[4])
        args["ncells"] = int(sys.argv[5])
        args["smlpixel"] = int(sys.argv[6])
        args["outbase"] = sys.argv[7]
    args = comm.bcast(args)
    label = args["simdir"].split('/')[-2].split('_')[2]

    # Organize division of Sub-&Halos over Processes on Proc. 0
    if comm_rank == 0:
        # Load simulation
        s = read_hdf5.snapshot(args["snapnum"], args["simdir"])
        s.read(["Coordinates", "Masses", "GFM_StellarFormationTime"],
               parttype=[0, 1, 4, 5])

        unitlength = dmaps.define_unit(s.header.unitlength)
        # Define Cosmology
        cosmo = LambdaCDM(H0=s.header.hubble * 100,
                          Om0=s.header.omega_m,
                          Ode0=s.header.omega_l)
        redshift = s.header.redshift
        print(': Redshift: %f' % redshift)

        # Sort Sub-&Halos over Processes
        SH = subhalo_data(args["hfdir"], args["hfname"], args["snapnum"],
                          s.header.hubble, s.header.unitlength)
        hist_edges = procdiv.histedges_equalN(SH['X'], comm_size)
        SH = procdiv.cluster_subhalos_box(SH, hist_edges, comm_size)

        # Calculate overlap for particle cuboids
        c = (const.c).to_value('km/s')
        fov_rad = 4 * np.pi * (np.percentile(SH['Vrms'], 90) / c)**2
        sh_dist = (cosmo.comoving_distance(redshift)).to_value(unitlength)
        alpha = 2  # multiplied by 4 because of Oguri&Marshall
        overlap = 0.5 * alpha * fov_rad * sh_dist  # half of field-of-view
        print('Cuboids overlap is: %f [%s]' % (overlap, unitlength))

        # Sort Particles over Processes
        DM, Gas, Star, BH = particle_data(s.data, s.header.hubble, unitlength)
        DM = procdiv.cluster_particles(DM, hist_edges, comm_size)
        Gas = procdiv.cluster_particles(Gas, hist_edges, comm_size)
        Star = procdiv.cluster_particles(Star, hist_edges, comm_size)
        BH = procdiv.cluster_particles(BH, hist_edges, comm_size)

    else:
        c = None
        alpha = None
        overlap = None
        unitlength = None
        cosmo = None
        redshift = None
        hist_edges = None
        SH = {
            'ID': None,
            'Vrms': None,
            'X': None,
            'Y': None,
            'Z': None,
            'split_size_1d': None,
            'split_disp_1d': None
        }
        DM = {
            'Mass': None,
            'X': None,
            'Y': None,
            'Z': None,
            'split_size_1d': None,
            'split_disp_1d': None
        }
        Gas = {
            'Mass': None,
            'X': None,
            'Y': None,
            'Z': None,
            'split_size_1d': None,
            'split_disp_1d': None
        }
        Star = {
            'Mass': None,
            'X': None,
            'Y': None,
            'Z': None,
            'split_size_1d': None,
            'split_disp_1d': None
        }
        BH = {
            'Mass': None,
            'X': None,
            'Y': None,
            'Z': None,
            'split_size_1d': None,
            'split_disp_1d': None
        }

    # Broadcast variables over all processors
    sh_split_size_1d = comm.bcast(SH['split_size_1d'], root=0)
    sh_split_disp_1d = comm.bcast(SH['split_disp_1d'], root=0)
    dm_split_size_1d = comm.bcast(DM['split_size_1d'], root=0)
    dm_split_disp_1d = comm.bcast(DM['split_disp_1d'], root=0)
    gas_split_size_1d = comm.bcast(Gas['split_size_1d'], root=0)
    gas_split_disp_1d = comm.bcast(Gas['split_disp_1d'], root=0)
    star_split_size_1d = comm.bcast(Star['split_size_1d'], root=0)
    star_split_disp_1d = comm.bcast(Star['split_disp_1d'], root=0)
    bh_split_size_1d = comm.bcast(BH['split_size_1d'], root=0)
    bh_split_disp_1d = comm.bcast(BH['split_disp_1d'], root=0)
    c = comm.bcast(c, root=0)
    unitlength = comm.bcast(unitlength, root=0)
    alpha = comm.bcast(alpha, root=0)
    overlap = comm.bcast(overlap, root=0)
    cosmo = comm.bcast(cosmo, root=0)
    redshift = comm.bcast(redshift, root=0)
    hist_edges = comm.bcast(hist_edges, root=0)

    SH = procdiv.scatter_subhalos(SH,
                                  sh_split_size_1d,
                                  comm_rank,
                                  comm,
                                  root_proc=0)
    DM = procdiv.scatter_particles(DM,
                                   dm_split_size_1d,
                                   comm_rank,
                                   comm,
                                   root_proc=0)
    Gas = procdiv.scatter_particles(Gas,
                                    gas_split_size_1d,
                                    comm_rank,
                                    comm,
                                    root_proc=0)
    Star = procdiv.scatter_particles(Star,
                                     star_split_size_1d,
                                     comm_rank,
                                     comm,
                                     root_proc=0)
    BH = procdiv.scatter_particles(BH,
                                   bh_split_size_1d,
                                   comm_rank,
                                   comm,
                                   root_proc=0)

    print(
        ': Proc. %d got: \n\t %d Sub-&Halos \n\t %d dark matter \n\t %d gas \n\t %d stars \n'
        % (comm_rank, int(sh_split_size_1d[comm_rank]),
           int(dm_split_size_1d[comm_rank]), int(gas_split_size_1d[comm_rank]),
           int(star_split_size_1d[comm_rank])))

    sigma_tot = []
    subhalo_id = []
    FOV = []
    ## Run over Sub-&Halos
    for ll in range(len(SH['ID'])):
        # Define field-of-view edge-length
        fov_rad = 4 * np.pi * (SH['Vrms'][ll] / c)**2
        #TODO: for z=0 sh_dist=0!!!
        sh_dist = (cosmo.comoving_distance(redshift)).to_value(unitlength)
        alpha = 1.4
        fov = alpha * fov_rad * sh_dist  #[kpc] edge-length of box

        # Check cuboid boundary condition,
        # that all surface densities are filled with particles
        if ((SH['Pos'][ll][0]-hist_edges[comm_rank] < overlap) or
                (hist_edges[comm_rank+1]-overlap < \
                 SH['Pos'][ll][0]-hist_edges[comm_rank])):
            if fov * 0.45 > overlap:
                print("FOV is bigger than cuboids overlap: %f > %f" % \
                        (fov*0.45, overlap))
                continue

        ## BH
        pos, indx = dmaps.select_particles(
            BH['Pos'],
            SH['Pos'][ll],  #*a/h,
            fov,
            'box')
        bh_sigma = dmaps.projected_density_pmesh(pos, BH['Mass'][indx], fov,
                                                 args["ncells"])

        ## Gas
        pos, indx = dmaps.select_particles(
            Gas['Pos'],
            SH['Pos'][ll],  #*a/h,
            fov,
            'box')
        gas_sigma = dmaps.projected_density_pmesh_adaptive(
            pos, Gas['Mass'][indx], fov, args["ncells"], hmax=args["smlpixel"])
        ## Star
        pos, indx = dmaps.select_particles(
            Star['Pos'],
            SH['Pos'][ll],  #*a/h,
            fov,
            'box')
        star_sigma = dmaps.projected_density_pmesh_adaptive(
            pos,
            Star['Mass'][indx],
            fov,
            args["ncells"],
            hmax=args["smlpixel"])
        ## DM
        pos, indx = dmaps.select_particles(
            DM['Pos'],
            SH['Pos'][ll],  #*a/h,
            fov,
            'box')
        dm_sigma = dmaps.projected_density_pmesh_adaptive(
            pos, DM['Mass'][indx], fov, args["ncells"], hmax=args["smlpixel"])
        sigmatotal = dm_sigma + gas_sigma + star_sigma

        # Make sure that the density map is filled
        extention = 0
        while (0.0 in sigmatotal) and (extention < 60):
            extention += 5
            dm_sigma = dmaps.projected_density_pmesh_adaptive(
                pos,
                DM['Mass'][indx],
                fov,
                args["ncells"],
                hmax=args["smlpixel"] + extention)
            sigmatotal = dm_sigma + gas_sigma + star_sigma

        #tmap.plotting(sigmatotal, args["ncells"], fov, 0.57)
        sigma_tot.append(sigmatotal)
        subhalo_id.append(int(SH['ID'][ll]))
        FOV.append(fov)

    fname = args["outbase"] + 'z_' + str(
        args["snapnum"]) + '/' + 'DM_' + label + '_' + str(comm_rank) + '.h5'
    hf = h5py.File(fname, 'w')
    hf.create_dataset('DMAP',
                      data=sigma_tot)  # density maps in simulation units
    hf.create_dataset('HFID',
                      data=np.asarray(subhalo_id))  # Rockstar sub-&halo IDs
    hf.create_dataset(
        'FOV', data=np.asarray(FOV))  # field-of-view edge length [unitlength]
    #RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility. Expected 96, got 88
    hf.close()
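A minimal sketch (not part of the original example) of how one of the per-rank output files written above could be read back, assuming only h5py/numpy and the dataset names 'DMAP', 'HFID' and 'FOV' used in the write step; the file name below is hypothetical:

import h5py
import numpy as np

def read_density_maps(fname):
    """Read one per-rank density-map file produced above (illustrative only)."""
    with h5py.File(fname, 'r') as hf:
        dmap = np.asarray(hf['DMAP'])  # (N, ncells, ncells) projected densities
        hfid = np.asarray(hf['HFID'])  # halo-finder IDs of the sub-&halos
        fov = np.asarray(hf['FOV'])    # field-of-view edge lengths
    return dmap, hfid, fov

# e.g. maps, ids, fov = read_density_maps('DM_F5_0.h5')  # hypothetical file name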
Example no. 25
0
    def __init__(self, snapnum=99):
        '''
        Class to read halo catalogs from simulation

        snapnum: snapshot number to read
        '''

        # Read snapshot

        h5_dir = '/cosma7/data/TNG/TNG300-1/'
        self.snapnum = snapnum
        self.snapshot = read_hdf5.snapshot(snapnum, h5_dir)

        self.boxsize = self.snapshot.header.boxsize  #kpc

        # Useful definitions
        self.dm = 1
        self.stars = 4
        self.dm_particle_mass = self.snapshot.header.massarr[self.dm] * 1.e10

        self.stellar_mass_thresh = 1.e9
        #self.halo_mass_thresh = self.dm_particle_mass * 100. # at least 100 particles
        self.halo_mass_thresh = 1.e11

        print('Minimum stellar mass : %.2E' % self.stellar_mass_thresh)
        print('Minimum DM halo mass : %.2E' % self.halo_mass_thresh)

        # Load fields that will be used
        useful_properties = [
            'GroupFirstSub', 'Group_M_Crit200', 'GroupNsubs', 'GroupPos',
            'GroupVel', 'Group_R_Crit200', 'SubhaloMassType', 'GroupMassType',
            'SubhaloCM', 'GroupCM', 'SubhaloMass', 'SubhaloMassInHalfRad',
            'SubhaloSpin', 'SubhaloVelDisp', 'SubhaloVmax'
        ]

        self.snapshot.group_catalog(useful_properties)

        self.N_subhalos = (self.snapshot.cat['GroupNsubs']).astype(np.int64)
        # Get only resolved halos
        self.halo_mass_cut = self.snapshot.cat[
            'Group_M_Crit200'][:] > self.halo_mass_thresh

        #self.group_offset = (np.cumsum(self.N_particles) - self.N_particles).astype(np.int64)
        #self.group_offset = self.group_offset[self.halo_mass_cut]

        self.subhalo_offset = (np.cumsum(self.N_subhalos) -
                               self.N_subhalos).astype(np.int64)
        self.subhalo_offset = self.subhalo_offset[self.halo_mass_cut]

        self.N_subhalos = self.N_subhalos[self.halo_mass_cut]

        self.N_halos = self.N_subhalos.shape[0]
        print('%d resolved halos found.' % self.N_halos)

        self.N_gals, self.M_stars = self.Number_of_galaxies()
        print('%d resolved galaxies found.' % np.sum(self.N_gals))

        self.logM_stars = np.log10(self.M_stars)

        self.load_inmidiate_features()

        self.compute_fsub_unbound()
        self.compute_x_offset()
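As an aside (not part of the original class), the subhalo_offset and N_subhalos arrays built above could be used to count galaxies per resolved halo roughly as sketched below; the actual Number_of_galaxies method is not shown here and may differ, and the 'SubhaloMassType' values may need a unit conversion depending on the reader:

    def count_galaxies_sketch(self):
        """Hypothetical helper: per-halo count of subhalos above the stellar-mass threshold."""
        # stellar mass of every subhalo (particle type 4 = stars)
        m_stars = self.snapshot.cat['SubhaloMassType'][:, self.stars]
        n_gals = np.zeros(self.N_halos, dtype=np.int64)
        for ii in range(self.N_halos):
            first = self.subhalo_offset[ii]
            last = first + self.N_subhalos[ii]
            n_gals[ii] = np.sum(m_stars[first:last] > self.stellar_mass_thresh)
        return n_gals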
Example no. 26
0
def lensing_signal():
    # Get command line arguments
    args = {}
    args["simdir"]       = sys.argv[1]
    args["dmdir"]        = sys.argv[2]
    args["lcdir"]        = sys.argv[3]
    args["outbase"]      = sys.argv[4]
    args["ncells"]      = int(sys.argv[5])
    #args["simdir"]       = '/cosma6/data/dp004/dc-arno1/SZ_project/full_physics/L62_N512_F5_kpc/'
    #args["dmdir"]        = '/cosma5/data/dp004/dc-beck3/StrongLensing/DensityMap/full_physics/L62_N512_F5_kpc/Lightcone/'
    #args["lcdir"]        = '/cosma5/data/dp004/dc-beck3/StrongLensing/LightCone/full_physics/Rockstar/LC_SN_L62_N512_F5_kpc'
    #args["outbase"]      = '/cosma5/data/dp004/dc-beck3/StrongLensing/LensingMap/full_physics/Rockstar/L62_N512_F5_kpc/Lightcone/'
    #args["ncells"]      = 512
    
    # Names of all available Density maps
    dmfile = glob.glob(args["dmdir"]+'*.h5')
    dmfile.sort(key = lambda x: x[-4])
    # Names of all available Lightcones
    lcfile = glob.glob(args["lcdir"]+'*.h5')
    lcfile.sort(key = lambda x: x[-4])

    lenslistinit(); srclistinit()
    # Run through files
    for ff in range(len(dmfile)):
        print('\n')
        print('------------- \n Reading Files: \n%s\n%s' % \
                (dmfile[ff].split('/')[-2:], lcfile[ff].split('/')[-2:]))
        # Load density maps
        dmf = h5py.File(dmfile[ff], 'r')
        dmdf = pd.DataFrame({'HF_ID' : dmf['HF_ID'],
                             'LC_ID' : dmf['LC_ID'],
                             'fov_Mpc' : dmf['fov_Mpc']})
        s1 = pd.Series(dict(list(enumerate(dmf['density_map']))), index=dmdf.index)
        dmdf['density_map'] = s1
        dmdf = dmdf.set_index('LC_ID')

        # Load Lightcones
        lcf = h5py.File(lcfile[ff], 'r')
        lcdf = pd.DataFrame({'HF_ID' : lcf['HF_ID'].value,
                             'LC_ID' : lcf['LC_ID'].value,
                             'zl' : lcf['Halo_z'].value,
                             'vrms' : lcf['VelDisp'].value,
                             'snapnum' : lcf['snapnum'].value,
                             'fov_Mpc' : lcf['FOV'][:][1]})
        lcdf = lcdf.set_index('LC_ID')
        srcdf = {'Src_ID' : lcf['Src_ID'].value,
                 'zs' : lcf['Src_z'].value,
                 'SrcPosSky' : lcf['SrcPosSky'].value,
                 'SrcAbsMag' : lcf['SrcAbsMag'].value}

        print('The minimum Vrms is %f' % (np.min(lcdf['vrms'].values)))

        dmdf = dmdf.sort_values(by=['LC_ID'])
        lcdf = lcdf.sort_values(by=['LC_ID'])
        # sanity check
        assert len(lcdf.index.intersection(dmdf.index)) == len(dmdf.index.values)
        lcdf['density_map'] = dmdf['density_map']
        
        #lcdf = lcdf.sort_values(by=['snapnum'])
        s = read_hdf5.snapshot(45, args["simdir"])
        # Cosmological Parameters
        cosmo = LambdaCDM(H0=s.header.hubble*100,
                          Om0=s.header.omega_m,
                          Ode0=s.header.omega_l)
    
        # Run through lenses
        print('There are %d lenses in file' % (len(lcdf.index.values)))
        for ll in range(len(lcdf.index.values)):
            lens = lcdf.iloc[ll]
            #print('working on lens %d' % lens['HF_ID'])
            # convert box size and pixel size from ang. diam. dist. to arcsec
            FOV_arc = (lens['fov_Mpc']/cf.Da(lens['zl'], cosmo)*u.rad).to_value('arcsec')
            dsx_arc = FOV_arc/args["ncells"]  #[arcsec] pixel size
            # initialize the coordinates of grids (light rays on the lens plane)
            lpv = np.linspace(-(FOV_arc-dsx_arc)/2, (FOV_arc-dsx_arc)/2, args["ncells"])
            lp1, lp2 = np.meshgrid(lpv, lpv)  #[arcsec]
       
            zs, Src_ID, SrcPosSky = lt.source_selection(
                    srcdf['Src_ID'], srcdf['zs'], srcdf['SrcPosSky'],
                    lcdf.index.values[ll])
            
            # Run through sources
            check_for_sources = 0
            fig = plt.figure()
            ax = fig.add_subplot(111)
            for ss in range(len(Src_ID)):
                # Calculate critical surface density
                sigma_cr = lt.sigma_crit(lens['zl'],
                                         zs[ss],
                                         cosmo).to_value('Msun Mpc-2')

                # convert source position from Mpc to arcsec
                beta = lt.mpc2arc(SrcPosSky[ss])
                beta = [bb*1e-3 for bb in beta]

                # Calculate convergence map
                kappa = lens['density_map']/sigma_cr
                #fig = plt.figure()
                #ax = fig.add_subplot(111)
                
                # Calculate Deflection Maps
                alpha1, alpha2, mu_map, phi, detA, lambda_t = lt.cal_lensing_signals(
                        kappa, FOV_arc, args["ncells"]) 
                # Calculate Einstein Radii in [arcsec]
                Ncrit, curve_crit, curve_crit_tan, Rein = lt.einstein_radii(
                        lp1, lp2, detA, lambda_t, lens['zl'], cosmo, ax, 'med')
                #if Rein == 0. or math.isnan(Rein):
                #    print('!!! Rein is 0. or NaN')
                #    continue
                # Calculate Time-Delay and Magnification
                n_imgs, delta_t, mu, theta  = lt.timedelay_magnification(
                        mu_map, phi, dsx_arc, args["ncells"],
                        lp1, lp2, alpha1, alpha2, beta,
                        zs[ss], lens['zl'], cosmo)
                if n_imgs > 1:
                    # Tree Branch 2
                    s_srcID.append(Src_ID[ss])
                    s_zs.append(zs[ss])
                    s_beta.append(beta)
                    #s_lensplane.append([lp1, lp2])
                    s_detA.append(detA)
                    s_tancritcurves.append(curve_crit_tan)
                    s_einsteinradius.append(Rein)  #[arcsec]
                    # Tree Branch 3
                    s_theta.append(theta)
                    s_deltat.append(delta_t)
                    s_mu.append(mu)
                    check_for_sources = 1
                    #print(' -> %d multiple lensed images' % (n_imgs))
            if check_for_sources == 1:
                # Tree Branch 1
                l_HFID.append(int(lens['HF_ID']))
                l_haloID.append(int(lcdf.index.values[ll]))
                l_snapnum.append(int(lens['snapnum']))
                l_zl.append(lens['zl'])
                #l_haloposbox.append(HaloPosBox[ll])
                # Tree Branch 2
                l_srcID.append(s_srcID)
                l_zs.append(s_zs)
                l_srcbeta.append(s_beta)
                #l_lensplane.append(s_lensplane)
                l_detA.append(s_detA)
                l_tancritcurves.append(s_tancritcurves)
                l_einsteinradius.append(s_einsteinradius)
                # Tree Branch 3
                l_srctheta.append(s_theta)
                l_deltat.append(s_deltat)
                l_mu.append(s_mu)
                srclistinit()
                check_for_sources = 0
                print('Save data of lens %d' % ll)
    
    ########## Save to File ########
    tree = plant_Tree()
    ## Tree Branches of Node 1 : Lenses
    #tree['HF_ID'] = l_HFID
    #tree['snapnum'] = args["snapnum"]
    #tree['zl'] = redshift
    #tree['zs'] = zs
    ## Tree Branches of Node 1 : Sources
    #tree['Sources']['beta'] = l_srcbeta
    #tree['Sources']['TCC'] = l_tancritcurves
    #tree['Sources']['Rein'] = l_einsteinradius
    #for imgs in range(len(l_mu)):
    #    # Tree Branches of Node 2 : Multiple Images
    #    tree['Sources']['theta'][imgs] = l_srctheta[imgs]
    #    tree['Sources']['delta_t'][imgs] = l_deltat[imgs]
    #    tree['Sources']['mu'][imgs] = l_mu[imgs]

    # Tree Branches of Node 1 : Lenses
    tree['LC_ID'] = l_haloID
    tree['HF_ID'] = l_HFID
    tree['snapnum'] = l_snapnum
    tree['zl'] = l_zl
    #tree['HaloPosBox'] = l_haloposbox
    for sid in range(len(l_haloID)):
        # Tree Branches of Node 2 : Sources
        tree['Sources']['Src_ID'][sid] = l_srcID[sid]
        tree['Sources']['zs'][sid] = l_zs[sid]
        tree['Sources']['beta'][sid] = l_srcbeta[sid]
        #tree['Sources']['LP'][sid] = l_lensplane[sid]
        tree['Sources']['detA'][sid] = l_detA[sid]
        tree['Sources']['TCC'][sid] = l_tancritcurves[sid]
        tree['Sources']['Rein'][sid] = l_einsteinradius[sid]
        for imgs in range(len(l_srcID[sid])):
            # Tree Branches of Node 3 : Multiple Images
            tree['Sources']['theta'][sid][imgs] = l_srctheta[sid][imgs]
            tree['Sources']['delta_t'][sid][imgs] = l_deltat[sid][imgs]
            tree['Sources']['mu'][sid][imgs] = l_mu[sid][imgs]
    label = args["simdir"].split('/')[-2].split('_')[2]
    filename = args["outbase"]+'LM_%s.pickle' % (label)
    filed = open(filename, 'wb')
    pickle.dump(tree, filed)
    filed.close()
    plt.close(fig)
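For reference, a self-contained sketch of the critical surface density that lt.sigma_crit presumably evaluates (the standard lensing relation Sigma_cr = c^2 D_s / (4 pi G D_l D_ls)); the helper used in the original code is not shown and may differ:

import numpy as np
from astropy import constants as const
from astropy.cosmology import LambdaCDM

def sigma_crit_sketch(zl, zs, cosmo):
    """Critical surface density for a lens at zl and a source at zs."""
    Dl = cosmo.angular_diameter_distance(zl)
    Ds = cosmo.angular_diameter_distance(zs)
    Dls = cosmo.angular_diameter_distance_z1z2(zl, zs)
    sigma_cr = const.c**2 / (4 * np.pi * const.G) * Ds / (Dl * Dls)
    return sigma_cr.to('Msun Mpc-2')

# e.g. sigma_crit_sketch(0.3, 1.0, LambdaCDM(H0=70, Om0=0.3, Ode0=0.7))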
Example no. 27
0
def dyn_vs_lensing_mass(CPUs, sim_dir, sim_phy, sim_name, hfname, lc_dir,
                        HQ_dir):
    # protect the 'entry point' for Windows OS
    # if __name__ == '__main__':
    # after importing numpy, reset the CPU affinity of the parent process so
    # that it will use all cores
    os.system("taskset -p 0xff %d" % os.getpid())

    # Run through simulations
    for sim in range(len(sim_dir)):
        # File for lens & source properties
        lc_file = lc_dir[sim] + hfname + '/LC_SN_' + sim_name[
            sim] + '_rndseed1.h5'
        # File for lensing-maps
        lm_dir = HQ_dir + '/LensingMap/' + sim_phy[
            sim] + hfname + '/' + sim_name[sim] + '/'
        # Simulation Snapshots
        snapfile = sim_dir[sim]

        # Units of Simulation
        scale = rf.simulation_units(sim_dir[sim])

        # Cosmological Parameters
        s = read_hdf5.snapshot(45, snapfile)
        cosmo = LambdaCDM(H0=s.header.hubble * 100,
                          Om0=s.header.omega_m,
                          Ode0=s.header.omega_l)
        h = s.header.hubble
        a = 1 / (1 + s.header.redshift)

        # Load LightCone Contents
        LC = rf.LightCone_with_SN_lens(lc_file, hfname)

        # Sort Lenses according to Snapshot Number (snapnum)
        indx = np.argsort(LC['snapnum'])
        Halo_ID = LC['Halo_ID'][indx]

        # Prepare Processes to be run in parallel
        jobs = []
        manager = multiprocessing.Manager()
        results_per_cpu = manager.dict()
        snapfile = sim_dir[sim] + 'snapdir_%03d/snap_%03d'
        for cpu in range(CPUs):
            # Load LensingMaps Contents
            lm_file = lm_dir + 'LM_Proc_' + str(cpu) + '_0.pickle'
            p = Process(target=la.dyn_vs_lensing_mass,
                        name='Proc_%d' % cpu,
                        args=(cpu, LC, lm_file, snapfile, h, scale, HQ_dir,
                              sim, sim_phy, sim_name, hfname, cosmo,
                              results_per_cpu))
            p.start()
            jobs.append(p)
        # Run Processes in parallel
        # Wait until every job is completed
        for p in jobs:
            p.join()

        # Save Data
        Halo_ID = []
        Src_ID = []
        Mdyn = []
        Mlens = []
        for cpu in range(CPUs):
            results = list(results_per_cpu.values())[cpu]  # list() so this also works on Python 3
            for src in range(len(results)):
                Halo_ID.append(results[src][0])
                Src_ID.append(results[src][1])
                Mdyn.append(results[src][2])
                Mlens.append(results[src][3])

        la_dir = HQ_dir + '/LensingAnalysis/' + sim_phy[sim]
        print('save data', la_dir, sim_name[sim])
        sim_label = la.define_sim_label(sim_name[sim], sim_dir[sim])
        hf = h5py.File(la_dir + 'DLMass_pa_shmr_svrms' + sim_label + '.h5',
                       'w')
        hf.create_dataset('Halo_ID', data=Halo_ID)
        hf.create_dataset('Src_ID', data=Src_ID)
        hf.create_dataset('Mdyn', data=Mdyn)
        hf.create_dataset('Mlens', data=Mlens)
        hf.close()
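A minimal sketch (under the assumption that each worker stores its output under its own rank, as the call above suggests) of the multiprocessing.Manager pattern used to collect per-process results; indexing the shared dict by key is more robust than positional access to .values():

import multiprocessing
from multiprocessing import Process

def _worker(cpu, results):
    # dummy payload standing in for the (Halo_ID, Src_ID, Mdyn, Mlens) tuples
    results[cpu] = [(cpu, 0, 1.0, 1.0)]

if __name__ == '__main__':
    manager = multiprocessing.Manager()
    results_per_cpu = manager.dict()
    jobs = [Process(target=_worker, args=(cpu, results_per_cpu)) for cpu in range(4)]
    for p in jobs:
        p.start()
    for p in jobs:
        p.join()
    for cpu in range(4):
        results = results_per_cpu[cpu]  # fetch this rank's result list by key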
Example no. 28
0
def create_density_maps():
    # Get command line arguments
    args = {}
    if comm_rank == 0:
        args["simdir"] = sys.argv[1]
        args["hfdir"] = sys.argv[2]
        args["snapnum"] = int(sys.argv[3])
        args["ncells"] = int(sys.argv[4])
        args["outbase"] = sys.argv[5]
        args["nfileout"] = sys.argv[6]
    args = comm.bcast(args)
    label = args["simdir"].split('/')[-2].split('_')[2]

    # Organize division of Sub-&Halos over Processes on Proc. 0
    if comm_rank == 0:
        # Sort Sub-&Halos over Processes
        df = pd.read_csv(args["hfdir"] + 'halos_%d.dat' % args["snapnum"],
                         sep='\s+',
                         skiprows=16,
                         usecols=[0, 2, 4, 9, 10, 11],
                         names=['ID', 'Mvir', 'Vrms', 'X', 'Y', 'Z'])
        df = df[df['Mvir'] > 5e11]
        sh_id = df['ID'].values.astype('float64')
        sh_vrms = df['Vrms'].values.astype('float64')
        sh_x = df['X'].values.astype('float64')
        sh_y = df['Y'].values.astype('float64')
        sh_z = df['Z'].values.astype('float64')
        del df
        hist_edges = histedges_equalN(sh_x, comm_size)
        sh_id, sh_vrms, sh_x, sh_y, sh_z, sh_split_size_1d, sh_split_disp_1d = cluster_subhalos(
            sh_id, sh_vrms, sh_x, sh_y, sh_z, hist_edges, comm_size)

        # Sort Particles over Processes
        s = read_hdf5.snapshot(args["snapnum"], args["simdir"])
        s.read(["Coordinates", "Masses", "GFM_StellarFormationTime"],
               parttype=[0, 1, 4])
        scale = 1e-3 * s.header.hubble
        ## Dark Matter
        dm_mass = (s.data['Masses']['dm']).astype('float64')
        dm_x = (s.data['Coordinates']['dm'][:, 0] * scale).astype('float64')
        dm_y = (s.data['Coordinates']['dm'][:, 1] * scale).astype('float64')
        dm_z = (s.data['Coordinates']['dm'][:, 2] * scale).astype('float64')
        dm_mass, dm_x, dm_y, dm_z, dm_split_size_1d, dm_split_disp_1d = cluster_particles(
            dm_mass, dm_x, dm_y, dm_z, hist_edges, comm_size)
        ## Gas
        gas_mass = (s.data['Masses']['gas']).astype('float64')
        gas_x = (s.data['Coordinates']['gas'][:, 0] * scale).astype('float64')
        gas_y = (s.data['Coordinates']['gas'][:, 1] * scale).astype('float64')
        gas_z = (s.data['Coordinates']['gas'][:, 2] * scale).astype('float64')
        gas_mass, gas_x, gas_y, gas_z, gas_split_size_1d, gas_split_disp_1d = cluster_particles(
            gas_mass, gas_x, gas_y, gas_z, hist_edges, comm_size)
        ## Stars
        star_mass = (s.data['Masses']['stars']).astype('float64')
        star_x = (s.data['Coordinates']['stars'][:, 0] *
                  scale).astype('float64')
        star_y = (s.data['Coordinates']['stars'][:, 1] *
                  scale).astype('float64')
        star_z = (s.data['Coordinates']['stars'][:, 2] *
                  scale).astype('float64')
        star_age = s.data['GFM_StellarFormationTime']['stars']
        star_x = star_x[star_age >= 0]  #[Mpc]
        star_y = star_y[star_age >= 0]  #[Mpc]
        star_z = star_z[star_age >= 0]  #[Mpc]
        star_mass = star_mass[star_age >= 0]
        del star_age
        star_mass, star_x, star_y, star_z, star_split_size_1d, star_split_disp_1d = cluster_particles(
            star_mass, star_x, star_y, star_z, hist_edges, comm_size)

        # Define Cosmology
        cosmo = LambdaCDM(H0=s.header.hubble * 100,
                          Om0=s.header.omega_m,
                          Ode0=s.header.omega_l)
        cosmosim = {
            'omega_M_0': s.header.omega_m,
            'omega_lambda_0': s.header.omega_l,
            'omega_k_0': 0.0,
            'h': s.header.hubble
        }
        redshift = s.header.redshift
        print(': Redshift: %f' % redshift)
    else:
        sh_id = None
        sh_vrms = None
        sh_x = None
        sh_y = None
        sh_z = None
        sh_split_size_1d = None
        sh_split_disp_1d = None
        dm_mass = None
        dm_x = None
        dm_y = None
        dm_z = None
        dm_split_size_1d = None
        dm_split_disp_1d = None
        gas_mass = None
        gas_x = None
        gas_y = None
        gas_z = None
        gas_split_size_1d = None
        gas_split_disp_1d = None
        star_mass = None
        star_x = None
        star_y = None
        star_z = None
        star_split_size_1d = None
        star_split_disp_1d = None
        cosmosim = None
        cosmo = None
        redshift = None

    # Broadcast variables over all processors
    sh_split_size_1d = comm.bcast(sh_split_size_1d, root=0)
    sh_split_disp_1d = comm.bcast(sh_split_disp_1d, root=0)
    dm_split_size_1d = comm.bcast(dm_split_size_1d, root=0)
    dm_split_disp_1d = comm.bcast(dm_split_disp_1d, root=0)
    gas_split_size_1d = comm.bcast(gas_split_size_1d, root=0)
    gas_split_disp_1d = comm.bcast(gas_split_disp_1d, root=0)
    star_split_size_1d = comm.bcast(star_split_size_1d, root=0)
    star_split_disp_1d = comm.bcast(star_split_disp_1d, root=0)
    cosmo = comm.bcast(cosmo, root=0)
    redshift = comm.bcast(redshift, root=0)

    # Initialize variables for each processor
    sh_id_local = np.zeros((int(sh_split_size_1d[comm_rank])))
    sh_vrms_local = np.zeros((int(sh_split_size_1d[comm_rank])))
    sh_x_local = np.zeros((int(sh_split_size_1d[comm_rank])))
    sh_y_local = np.zeros((int(sh_split_size_1d[comm_rank])))
    sh_z_local = np.zeros((int(sh_split_size_1d[comm_rank])))
    dm_mass_local = np.zeros((int(dm_split_size_1d[comm_rank])))
    dm_x_local = np.zeros((int(dm_split_size_1d[comm_rank])))
    dm_y_local = np.zeros((int(dm_split_size_1d[comm_rank])))
    dm_z_local = np.zeros((int(dm_split_size_1d[comm_rank])))
    gas_mass_local = np.zeros((int(gas_split_size_1d[comm_rank])))
    gas_x_local = np.zeros((int(gas_split_size_1d[comm_rank])))
    gas_y_local = np.zeros((int(gas_split_size_1d[comm_rank])))
    gas_z_local = np.zeros((int(gas_split_size_1d[comm_rank])))
    star_mass_local = np.zeros((int(star_split_size_1d[comm_rank])))
    star_x_local = np.zeros((int(star_split_size_1d[comm_rank])))
    star_y_local = np.zeros((int(star_split_size_1d[comm_rank])))
    star_z_local = np.zeros((int(star_split_size_1d[comm_rank])))

    # Divide Data over Processes
    comm.Scatterv([sh_id, sh_split_size_1d, sh_split_disp_1d, MPI.DOUBLE],
                  sh_id_local,
                  root=0)
    comm.Scatterv([sh_vrms, sh_split_size_1d, sh_split_disp_1d, MPI.DOUBLE],
                  sh_vrms_local,
                  root=0)
    comm.Scatterv([sh_x, sh_split_size_1d, sh_split_disp_1d, MPI.DOUBLE],
                  sh_x_local,
                  root=0)
    comm.Scatterv([sh_y, sh_split_size_1d, sh_split_disp_1d, MPI.DOUBLE],
                  sh_y_local,
                  root=0)
    comm.Scatterv([sh_z, sh_split_size_1d, sh_split_disp_1d, MPI.DOUBLE],
                  sh_z_local,
                  root=0)

    comm.Scatterv([dm_x, dm_split_size_1d, dm_split_disp_1d, MPI.DOUBLE],
                  dm_x_local,
                  root=0)
    comm.Scatterv([dm_y, dm_split_size_1d, dm_split_disp_1d, MPI.DOUBLE],
                  dm_y_local,
                  root=0)
    comm.Scatterv([dm_z, dm_split_size_1d, dm_split_disp_1d, MPI.DOUBLE],
                  dm_z_local,
                  root=0)
    comm.Scatterv([dm_mass, dm_split_size_1d, dm_split_disp_1d, MPI.DOUBLE],
                  dm_mass_local,
                  root=0)

    comm.Scatterv([gas_x, gas_split_size_1d, gas_split_disp_1d, MPI.DOUBLE],
                  gas_x_local,
                  root=0)
    comm.Scatterv([gas_y, gas_split_size_1d, gas_split_disp_1d, MPI.DOUBLE],
                  gas_y_local,
                  root=0)
    comm.Scatterv([gas_z, gas_split_size_1d, gas_split_disp_1d, MPI.DOUBLE],
                  gas_z_local,
                  root=0)
    comm.Scatterv([gas_mass, gas_split_size_1d, gas_split_disp_1d, MPI.DOUBLE],
                  gas_mass_local,
                  root=0)

    comm.Scatterv([star_x, star_split_size_1d, star_split_disp_1d, MPI.DOUBLE],
                  star_x_local,
                  root=0)
    comm.Scatterv([star_y, star_split_size_1d, star_split_disp_1d, MPI.DOUBLE],
                  star_y_local,
                  root=0)
    comm.Scatterv([star_z, star_split_size_1d, star_split_disp_1d, MPI.DOUBLE],
                  star_z_local,
                  root=0)
    comm.Scatterv(
        [star_mass, star_split_size_1d, star_split_disp_1d, MPI.DOUBLE],
        star_mass_local,
        root=0)
    print(
        ': Proc. %d got: \n\t %d Sub-&Halos \n\t %d dark matter \n\t %d gas \n\t %d stars \n'
        % (comm_rank, int(sh_split_size_1d[comm_rank]),
           int(dm_split_size_1d[comm_rank]), int(gas_split_size_1d[comm_rank]),
           int(star_split_size_1d[comm_rank])))

    comm.Barrier()

    SH = {
        "ID": sh_id_local,
        "Vrms": sh_vrms_local,
        "Pos": np.transpose([sh_x_local, sh_y_local, sh_z_local])
    }
    DM = {
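        # all DM particles share one mass, so np.unique typically collapses to a single value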
        "Mass": np.unique(dm_mass_local),
        "Pos": np.transpose([dm_x_local, dm_y_local, dm_z_local])
    }
    Gas = {
        "Mass": gas_mass_local,
        "Pos": np.transpose([gas_x_local, gas_y_local, gas_z_local])
    }
    Star = {
        "Mass": star_mass_local,
        "Pos": np.transpose([star_x_local, star_y_local, star_z_local])
    }

    sigma_tot = []
    subhalo_id = []
    FOV = []
    ## Run over Sub-&Halos
    for ll in range(len(SH['ID'])):
        # Define field-of-view
        c = (const.c).to_value('km/s')
        fov_rad = 4 * np.pi * (SH['Vrms'][ll] / c)**2
        #TODO: for z=0 sh_dist=0!!!
        sh_dist = (cosmo.comoving_distance(redshift)).to_value('Mpc')
        alpha = 6  # multiplied by 4 because of Oguri&Marshall
        fov_Mpc = alpha * fov_rad * sh_dist  # is it the diameter?

        dm_sigma, h = DMf.projected_surface_density_smooth(
            DM['Pos'],  #[Mpc]
            SH['Pos'][ll],
            fov_Mpc,  #[Mpc]
            args["ncells"])
        dm_sigma *= DM['Mass']
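        # (projected_surface_density_smooth presumably returns a per-pixel count,
        #  scaled here by the single DM particle mass)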
        gas_sigma = projected_surface_density(
            Gas['Pos'],  #*a/h,
            Gas['Mass'],
            SH['Pos'][ll],  #*a/h,
            fov_Mpc,
            args["ncells"])
        gas_sigma = gaussian_filter(gas_sigma, sigma=h)
        star_sigma = projected_surface_density(
            Star['Pos'],  #*a/h,
            Star['Mass'],
            SH['Pos'][ll],  #*a/h,
            fov_Mpc,
            args["ncells"])
        star_sigma = gaussian_filter(star_sigma, sigma=h)

        # Skip this sub-&halo if all of its density maps are empty
        if ((np.count_nonzero(dm_sigma) == 0)
                and (np.count_nonzero(gas_sigma) == 0)
                and (np.count_nonzero(star_sigma) == 0)):
            continue
        sigmatotal = dm_sigma + gas_sigma + star_sigma

        sigma_tot.append(sigmatotal)
        subhalo_id.append(int(SH['ID'][ll]))
        FOV.append(fov_Mpc)

    fname = args["outbase"] + 'z_' + str(
        args["snapnum"]) + '/' + 'DM_' + label + '_' + str(comm_rank) + '.h5'
    hf = h5py.File(fname, 'w')
    hf.create_dataset('density_map', data=sigma_tot)
    hf.create_dataset('subhalo_id', data=np.asarray(subhalo_id))
    hf.create_dataset('fov_width', data=np.asarray(FOV))
    #RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility. Expected 96, got 88
    hf.close()
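The helper histedges_equalN used above to split the sub-&halos across MPI ranks by their x-coordinate is not part of this example; a common sketch of such an equal-count edge finder, assuming it returns nbin+1 monotonically increasing edges, is:

import numpy as np

def histedges_equalN_sketch(x, nbin):
    """Bin edges along x such that each of the nbin bins holds (nearly) the same number of entries."""
    npt = len(x)
    return np.interp(np.linspace(0, npt, nbin + 1),
                     np.arange(npt),
                     np.sort(x))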