Code example #1
def get_particle_data(path, snap, part_type, soft):

    # Get positions, masses and smoothing lengths
    poss = E.read_array('SNAP',
                        path,
                        snap,
                        'PartType' + str(part_type) + '/Coordinates',
                        noH=True,
                        physicalUnits=False,
                        numThreads=8)
    if part_type != 1:
        masses = E.read_array('SNAP',
                              path,
                              snap,
                              'PartType' + str(part_type) + '/Mass',
                              noH=True,
                              physicalUnits=False,
                              numThreads=8) * 10**10
        smls = E.read_array('SNAP',
                            path,
                            snap,
                            'PartType' + str(part_type) + '/SmoothingLength',
                            noH=True,
                            physicalUnits=False,
                            numThreads=8)
    else:
        masses = np.ones(poss.shape[0])
        smls = np.full_like(masses, soft)

    return poss, masses, smls
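
A minimal usage sketch for the function above (the path, snapshot tag and softening value are hypothetical; like the example itself, it assumes eagle_IO is imported as E):

import numpy as np
import eagle_IO.eagle_IO as E

path = '/path/to/G-EAGLE_00/data'   # hypothetical data directory
snap = '010_z005p000'               # hypothetical snapshot tag
soft = 0.001                        # softening used as the DM smoothing length

# For dark matter (part_type=1) the function returns unit masses and a
# constant smoothing length equal to `soft`
poss, masses, smls = get_particle_data(path, snap, part_type=1, soft=soft)
print(poss.shape, masses.shape, smls.shape)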
Code example #2
File: flares.py Project: aswinpvijayan/flares
    def spherical_region(self, sim, snap):
        """
        Inspired by David Turner's suggestion
        """

        dm_cood = E.read_array('PARTDATA',
                               sim,
                               snap,
                               '/PartType1/Coordinates',
                               noH=False,
                               physicalUnits=False,
                               numThreads=4)  #dm particle coordinates

        hull = ConvexHull(dm_cood)

        cen = [
            np.median(dm_cood[:, 0]),
            np.median(dm_cood[:, 1]),
            np.median(dm_cood[:, 2])
        ]
        pedge = dm_cood[hull.vertices]  #edge particles
        y_obs = np.zeros(len(pedge))
        p0 = np.append(cen, self.radius)

        popt, pcov = curve_fit(self._sphere,
                               pedge,
                               y_obs,
                               p0,
                               method='lm',
                               sigma=np.ones(len(pedge)) * 0.001)
        dist = np.sqrt(np.sum((pedge - popt[:3])**2, axis=1))
        centre, radius, mindist = popt[:3], popt[3], np.min(dist)

        return centre, radius, mindist
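
The method above calls a `self._sphere` helper that is not shown. A plausible sketch, assuming it returns the residual of the sphere equation so that curve_fit drives it to zero at the hull vertices:

    def _sphere(self, coords, a, b, c, r):
        # Residual of the sphere equation: zero when (x, y, z) lies on the
        # sphere with centre (a, b, c) and radius r (assumed form, not shown
        # in the source)
        x, y, z = coords[:, 0], coords[:, 1], coords[:, 2]
        return (x - a)**2 + (y - b)**2 + (z - c)**2 - r**2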
Code example #3
def speed_test(threads=1):
    start = time.time()

    mstar = E.read_array('SUBFIND',
                         directory,
                         tag,
                         "/Subhalo/Stars/Mass",
                         numThreads=threads,
                         noH=True,
                         physicalUnits=True)

    end = time.time()
    print("Threads: %i\nTime elapsed: %.4f\n" % (threads, end - start))
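
A hedged driver sketch for the timing helper above (it assumes `directory`, `tag`, `time` and `eagle_IO.eagle_IO as E` are defined/imported at module level, exactly as the function itself does):

for n in (1, 2, 4, 8):
    speed_test(threads=n)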
Code example #4
def spherical_region(sim, snap, coods=None):
    """
    Inspired by David Turner's suggestion
    """

    if coods is None:
        print("Using dark matter particles to define region, Loading...")
        coods = E.read_array('PARTDATA',
                             sim,
                             snap,
                             '/PartType1/Coordinates',
                             noH=True,
                             physicalUnits=False,
                             numThreads=4)  # dm particle coordinates

    hull = ConvexHull(coods)

    print('Defined convex hull')

    cen = [
        np.median(coods[:, 0]),
        np.median(coods[:, 1]),
        np.median(coods[:, 2])
    ]
    pedge = coods[hull.vertices]  #edge particles
    y_obs = np.zeros(len(pedge))
    p0 = np.append(cen, 15 / 0.677)

    popt, pcov = curve_fit(_sphere,
                           pedge,
                           y_obs,
                           p0,
                           method='lm',
                           sigma=np.ones(len(pedge)) * 0.001)
    dist = np.sqrt(np.sum((pedge - popt[:3])**2, axis=1))
    centre, radius, mindist = popt[:3], popt[3], np.min(dist)

    print('computed fit')

    return centre, radius, mindist
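
Hedged usage sketch: fit the high-resolution region and keep only subhalo centres of potential inside the fitted sphere (the paths are hypothetical; the coordinates are read with the same noH/physicalUnits choices used inside spherical_region so the units match):

import numpy as np
import eagle_IO.eagle_IO as E

sim = '/path/to/GEAGLE_00/data/'    # hypothetical data directory
tag = '010_z005p000'                # hypothetical snapshot tag

centre, radius, mindist = spherical_region(sim, tag)
cop = E.read_array('SUBFIND', sim, tag, '/Subhalo/CentreOfPotential',
                   noH=True, physicalUnits=False, numThreads=4)
inside = np.linalg.norm(cop - centre, axis=1) <= mindist
print(inside.sum(), "subhaloes inside the fitted region")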
Code example #5
def recalculate_derived_subhalo_properties(inp, num, tag, S_len, G_len, D_len, \
                                   S_index, G_index, D_index, data_folder = 'data/'):
    """
    Recalculate subhalo properties, such as the stellar/total mass and SFR,
    after inclusion of spurious galaxies.
    """

    if inp == 'FLARES':
        sim_type = 'FLARES'
        fl = flares.flares(fname = F'./{data_folder}/',sim_type=sim_type)
        _dir = fl.directory
        sim = F"{_dir}GEAGLE_{num}/data/"

    elif inp == 'REF':
        sim_type = 'PERIODIC'
        fl = flares.flares(fname = F'./{data_folder}/',sim_type=sim_type)
        sim = fl.ref_directory

    elif inp == 'AGNdT9':
        sim_type = 'PERIODIC'
        fl = flares.flares(fname = F'./{data_folder}/',sim_type=sim_type)
        sim = fl.agn_directory

    else:
        raise ValueError("Type of input simulation not recognized")


    # gp_sfr = E.read_array('PARTDATA', sim, tag, '/PartType0/StarFormationRate', noH=True, physicalUnits=True, numThreads=1)
    try:
        gp_mass = E.read_array('PARTDATA', sim, tag, '/PartType0/Mass',
                               noH=True, physicalUnits=True, numThreads=1)
    except:
        gp_mass = np.array([])

    try:
        sp_mass = E.read_array('PARTDATA', sim, tag, '/PartType4/Mass',
                               noH=True, physicalUnits=True, numThreads=1)
    except:
        sp_mass = np.array([])

    try:
        dm_pmass = E.read_header('PARTDATA',sim,tag,'MassTable')[1] /\
                   E.read_header('PARTDATA',sim,tag,'HubbleParam')
    except:
        dm_pmass = np.array([])

    sbegin = np.zeros(len(S_len), dtype = np.int64)
    send = np.zeros(len(S_len), dtype = np.int64)
    sbegin[1:] = np.cumsum(S_len)[:-1]
    send = np.cumsum(S_len)

    gbegin = np.zeros(len(G_len), dtype = np.int64)
    gend = np.zeros(len(G_len), dtype = np.int64)
    gbegin[1:] = np.cumsum(G_len)[:-1]
    gend = np.cumsum(G_len)

    SMass = np.zeros(len(S_len))
    GMass = np.zeros(len(G_len))
    # total_SFR = np.zeros(len(S_len))

    for jj in range(len(sbegin)):
        SMass[jj] = np.sum(sp_mass[S_index[sbegin[jj]:send[jj]]])
        GMass[jj] = np.sum(gp_mass[G_index[gbegin[jj]:gend[jj]]])
        # total_SFR[jj] = np.sum(gp_sfr[G_index[gbegin[jj]:gend[jj]]])

    DMass = D_len * dm_pmass

    return SMass, GMass, DMass # , total_SFR
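
A toy illustration of the length-to-pointer pattern used above, showing how sbegin/send slice a flat index array per subhalo:

import numpy as np

S_len = np.array([3, 2, 4])                  # star particles per subhalo
sbegin = np.zeros(len(S_len), dtype=np.int64)
sbegin[1:] = np.cumsum(S_len)[:-1]           # [0, 3, 5]
send = np.cumsum(S_len)                      # [3, 5, 9]
# subhalo jj owns the flat indices S_index[sbegin[jj]:send[jj]]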
Code example #6
def extract_info(num, tag, inp='FLARES'):

    """
    Returns a set of pre-defined properties of galaxies from a
    region in FLARES `num` for `tag`. Selects only galaxies
    with more than 100 star+gas particles inside 30 pkpc.

    Args:
        num : str
            the FLARES/G-EAGLE id of the sim; eg: '00', '01', ...
        tag : str
            the file tag; eg: '000_z015p00', '001_z014p000', ..., '011_z004p770'

    """

    ## MPI parameters
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    size = comm.Get_size()

    print (F"Extracting information from {inp} {num} {tag} (rank: {rank}, size: {size})")

    if inp == 'FLARES':
        sim_type = 'FLARES'
        fl = flares.flares(fname = './data/',sim_type=sim_type)
        _dir = fl.directory
        sim = F"{_dir}GEAGLE_{num}/data/"

    elif inp == 'REF':
        sim_type = 'PERIODIC'
        fl = flares.flares(fname = './data/',sim_type=sim_type)
        sim = fl.ref_directory

    elif inp == 'AGNdT9':
        sim_type = 'PERIODIC'
        fl = flares.flares(fname = './data/',sim_type=sim_type)
        sim = fl.agn_directory

    else:
        raise ValueError("Type of input simulation not recognized")

    if rank == 0:
        print (F"Sim location: {sim}, tag: {tag}")

    #Simulation redshift
    z = E.read_header('SUBFIND', sim, tag, 'Redshift')
    a = E.read_header('SUBFIND', sim, tag, 'ExpansionFactor')
    boxl = E.read_header('SUBFIND', sim, tag, 'BoxSize')/E.read_header('SUBFIND', sim, tag, 'HubbleParam')

    ####### Galaxy global properties  #######
    # SubhaloMass = E.read_array('SUBFIND', sim, tag, '/Subhalo/Mass', numThreads=4, noH=True, physicalUnits=True)
    Maperture = E.read_array('SUBFIND', sim, tag, '/Subhalo/ApertureMeasurements/Mass/030kpc', numThreads=4, noH=True, physicalUnits=True)
    mstar = Maperture[:,4]
    sgrpno = E.read_array('SUBFIND', sim, tag, '/Subhalo/SubGroupNumber', numThreads=4)
    grpno = E.read_array('SUBFIND', sim, tag, '/Subhalo/GroupNumber', numThreads=4)


    if inp == 'FLARES':
        ## Selecting the subhalos within our region
        cop = E.read_array('SUBFIND', sim, tag, '/Subhalo/CentreOfPotential', noH=False, physicalUnits=False, numThreads=4) #units of cMpc/h
        cen, r, min_dist = fl.spherical_region(sim, tag)  #units of cMpc/h
        indices = np.where((mstar*1e10 >= 10**7.) & (norm(cop-cen, axis=1)<=fl.radius))[0]

    else:
        indices = np.where(mstar*1e10 >= 10**7.)[0]

    cop = E.read_array('SUBFIND', sim, tag, '/Subhalo/CentreOfPotential', noH=True, physicalUnits=True, numThreads=4)
    # sfr_inst =  E.read_array('SUBFIND', sim, tag, '/Subhalo/ApertureMeasurements/SFR/030kpc', numThreads=4, noH=True, physicalUnits=True)


    ####### Particle properties #######

    #dm particle
    dm_cood = E.read_array('PARTDATA', sim, tag, '/PartType1/Coordinates', noH=True, physicalUnits=True, numThreads=4)
    dm_sgrpn = E.read_array('PARTDATA', sim, tag, '/PartType1/SubGroupNumber', numThreads=4)
    dm_grpn = E.read_array('PARTDATA', sim, tag, '/PartType1/GroupNumber', numThreads=4)

    #Gas particle
    gp_cood = E.read_array('PARTDATA', sim, tag, '/PartType0/Coordinates', noH=True, physicalUnits=True, numThreads=4)
    gp_sgrpn = E.read_array('PARTDATA', sim, tag, '/PartType0/SubGroupNumber', numThreads=4)
    gp_grpn = E.read_array('PARTDATA', sim, tag, '/PartType0/GroupNumber', numThreads=4)
    gp_sfr = E.read_array('PARTDATA', sim, tag, '/PartType0/StarFormationRate', noH=True, physicalUnits=True, numThreads=4)

    #Star particle
    try:
        sp_cood = E.read_array('PARTDATA', sim, tag, '/PartType4/Coordinates', noH=True, physicalUnits=True, numThreads=4)
        sp_sgrpn = E.read_array('PARTDATA', sim, tag, '/PartType4/SubGroupNumber', numThreads=4)
        sp_grpn = E.read_array('PARTDATA', sim, tag, '/PartType4/GroupNumber', numThreads=4)
        SP = True
    except:
        SP = False
        print("No star particles found")


    #Black hole particle
    """
    Subgrid properties are the ones required.
    Only at high masses do the subhalo and particle properties trace each other.
    """
    try:
        bh_sgrpn = E.read_array('PARTDATA', sim, tag, '/PartType5/SubGroupNumber', numThreads=4)
        bh_grpn = E.read_array('PARTDATA', sim, tag, '/PartType5/GroupNumber', numThreads=4)
        bh_mass = E.read_array('PARTDATA', sim, tag, '/PartType5/BH_Mass', noH=True, physicalUnits=True, numThreads=4)
        bh_cood = E.read_array('PARTDATA', sim, tag, '/PartType5/Coordinates', numThreads=4, noH=True, physicalUnits=True)
        BH = True
    except:
        BH = False
        print("No Black hole particles found")




    ###########################  For identifying spurious galaxies and remerging them to the parent  ###########################

    #First method: just using the EAGLE method of merging them
    #to the nearby subhalo
    #Second method: use Will's criterion in MEGA to merge only
    #particles of those group that are identified as single
    #entities ----- not done for now

    #Identifying the index of the spurious array within the
    #array `indices`

    spurious_indices = np.where((Maperture[:,0][indices] == 0) | (Maperture[:,1][indices] == 0) | (Maperture[:,4][indices] == 0))[0]
    if len(spurious_indices)>0:
        #Calculating the distance of the spurious to the other subhalos
        dist_to_others = cdist(cop[indices[spurious_indices]], cop[indices])

        #To take into account the fact that the spurious subhalos
        #themselves as well as others are present within
        #`indices` at the moment
        dist_to_others[:, spurious_indices] = np.nan

        #Parent is classified as the nearest subhalo to the spurious
        parent = indices[np.nanargmin(dist_to_others, axis=1)]

        #returns the index of the parent and its associated spurious
        #as an array of arrays. `spurious_of_parent` is linked to
        #the `spurious` which is defined below so you can get the
        #original index back wrt to the whole dataset
        parent, spurious_of_parent = ndix_unique(parent)

        #remove the spurious from indices so they aren't counted twice
        #in the subhalo/particle property collection, but retain
        #information (`spurious` array) on where they are within the
        #whole dataset for later use
        spurious = indices[spurious_indices]
        indices = np.delete(indices, spurious_indices)

        del spurious_indices, dist_to_others
        sp_ok = True

    else:
        sp_ok = False

    gc.collect()


    comm.Barrier()

    part = int(len(indices)/size)
    num_subhalos = int(len(sgrpno))

    if rank == 0:
        if inp != 'FLARES': num = ''
        print("Extracting required properties for {} subhalos from {} region {} at z = {} of boxsize = {}".format(len(indices), inp, num, z, boxl))

    #For getting black hole subgrid masses
    tbhindex = np.zeros(num_subhalos, dtype = np.int32)
    tbh_cood = np.zeros((num_subhalos, 3), dtype = np.float64)
    tbh_mass = np.zeros(num_subhalos, dtype = np.float32)

    tsindex = np.zeros(num_subhalos, dtype = np.int32)


    if inp == 'FLARES':
        if rank!=size-1:
            thisok = indices[rank*part:(rank+1)*part]
        else:
            thisok = indices[rank*part:]

    else:
        #Size needs to be a perfect cube to work
        l = boxl / (size)**(1/3)
        sz = (size)**(1/3)
        dl = 10.
        xyz = np.zeros((size,8,3))
        count=0
        for xx in range(int(sz)):
            for yy in range(int(sz)):
                for zz in range(int(sz)):
                    xyz[count] = np.array([[xx, yy, zz], [xx+1, yy, zz], [xx, yy+1, zz], [xx, yy, zz+1], [xx+1, yy+1, zz], [xx+1, yy, zz+1], [xx, yy+1, zz+1], [xx+1, yy+1, zz+1]])
                    count+=1


        this_xyz = xyz[rank]*l
        max_xyz = np.max(this_xyz, axis=0)
        min_xyz = np.min(this_xyz, axis=0)


        thisok = np.ones(len(indices), dtype=bool)
        for xx in range(3):
            thisok*=np.logical_and(cop[indices][:,xx]/a>=min_xyz[xx], cop[indices][:,xx]/a<=max_xyz[xx])
        thisok = indices[thisok]
        # print (thisok, rank, max_xyz, min_xyz)
        # print (cop[thisok])

        #Dividing the gas particles into a cell for current task
        dd = np.ones(len(dm_cood), dtype=bool)
        for xx in range(3):
            dd*=np.logical_or((min_xyz[xx]-dl<=dm_cood[:,xx]/a)*(dm_cood[:,xx]/a<=max_xyz[xx]+dl), np.logical_or((min_xyz[xx]-dl<=dm_cood[:,xx]/a+boxl)*(dm_cood[:,xx]/a+boxl<=max_xyz[xx]+dl), (min_xyz[xx]-dl<=dm_cood[:,xx]/a-boxl)*(dm_cood[:,xx]/a-boxl<=max_xyz[xx]+dl)))
        dd = np.where(dd)[0]

        dm_cood = dm_cood[dd]
        dm_sgrpn = dm_sgrpn[dd]
        dm_grpn = dm_grpn[dd]

        gg = np.ones(len(gp_cood), dtype=bool)
        for xx in range(3):
            gg*=np.logical_or((min_xyz[xx]-dl<=gp_cood[:,xx]/a)*(gp_cood[:,xx]/a<=max_xyz[xx]+dl), np.logical_or((min_xyz[xx]-dl<=gp_cood[:,xx]/a+boxl)*(gp_cood[:,xx]/a+boxl<=max_xyz[xx]+dl), (min_xyz[xx]-dl<=gp_cood[:,xx]/a-boxl)*(gp_cood[:,xx]/a-boxl<=max_xyz[xx]+dl)))
        gg = np.where(gg)[0]

        gp_cood = gp_cood[gg]
        gp_sgrpn = gp_sgrpn[gg]
        gp_grpn = gp_grpn[gg]
        gp_sfr = gp_sfr[gg]

        #Dividing the star particles into a cell for current task
        if SP:
            ss = np.ones(len(sp_cood), dtype=bool)
            for xx in range(3):
                ss*=np.logical_or((min_xyz[xx]-dl<=sp_cood[:,xx]/a)*(sp_cood[:,xx]/a<=max_xyz[xx]+dl), np.logical_or((min_xyz[xx]-dl<=sp_cood[:,xx]/a+boxl)*(sp_cood[:,xx]/a+boxl<=max_xyz[xx]+dl), (min_xyz[xx]-dl<=sp_cood[:,xx]/a-boxl)*(sp_cood[:,xx]/a-boxl<=max_xyz[xx]+dl)))
            ss = np.where(ss)[0]

            sp_cood = sp_cood[ss]
            sp_sgrpn = sp_sgrpn[ss]
            sp_grpn = sp_grpn[ss]

        #Dividing the black hole particles into a cell for current task
        if BH:
            bb = np.ones(len(bh_cood), dtype=bool)
            for xx in range(3):
                bb*=np.logical_or((min_xyz[xx]-dl<=bh_cood[:,xx]/a)*(bh_cood[:,xx]/a<=max_xyz[xx]+dl), np.logical_or((min_xyz[xx]-dl<=bh_cood[:,xx]/a+boxl)*(bh_cood[:,xx]/a+boxl<=max_xyz[xx]+dl), (min_xyz[xx]-dl<=bh_cood[:,xx]/a-boxl)*(bh_cood[:,xx]/a-boxl<=max_xyz[xx]+dl)))
            bb = np.where(bb)[0]

            bh_sgrpn = bh_sgrpn[bb]
            bh_grpn = bh_grpn[bb]
            bh_mass = bh_mass[bb]
            bh_cood = bh_cood[bb]

    gc.collect()

    tdnum = np.zeros(len(thisok)+1, dtype = np.int32)
    tsnum = np.zeros(len(thisok)+1, dtype = np.int32)
    tgnum = np.zeros(len(thisok)+1, dtype = np.int32)
    ind = np.array([])

    tdindex = np.zeros(len(dm_grpn), dtype = np.int32)
    tgindex = np.zeros(len(gp_grpn), dtype = np.int32)

    if SP:
        tsindex = np.zeros(len(sp_grpn), dtype = np.int32)

    gc.collect()

    kk = 0
    # dist = 0.1 #in pMpc for 100 pkpc Aperture, writes out particle properties within this aperture
    sel_dist = 0.03 #in pMpc for 30 pkpc Aperture, only galaxies with more than 100 star + gas particles within this aperture is written out to the master file. Only spurious galaxies within 30 pkpc are selected
    bounds = np.array([boxl, boxl, boxl])   #https://stackoverflow.com/a/11109244
    for ii, jj in enumerate(thisok):

        #start = timeit.default_timer()

        d_ok = np.where((dm_sgrpn-sgrpno[jj]==0) & (dm_grpn-grpno[jj]==0))[0]

        g_ok = np.where((gp_sgrpn-sgrpno[jj]==0) & (gp_grpn-grpno[jj]==0))[0]
        tmp = gp_cood[g_ok]-cop[jj]
        if inp!='FLARES': tmp = np.min(np.dstack(((tmp) % bounds, (-tmp) % bounds)), axis = 2)
        g_ok_sel = g_ok[norm(tmp,axis=1)<=sel_dist]

        if SP:
            s_ok = np.where((sp_sgrpn-sgrpno[jj]==0) & (sp_grpn-grpno[jj]==0))[0]
            tmp = sp_cood[s_ok]-cop[jj]
            if inp!='FLARES': tmp = np.min(np.dstack(((tmp) % bounds, (-tmp) % bounds)), axis = 2)
            s_ok_sel = s_ok[norm(tmp,axis=1)<=sel_dist]
        else:
            s_ok = np.array([])
            s_ok_sel = np.array([])

        if BH:
            bh_ok = np.where((bh_sgrpn-sgrpno[jj]==0) & (bh_grpn-grpno[jj]==0))[0]

        if sp_ok:
            if jj in parent:
                this_spurious = np.where(parent == jj)[0]

                for _jj in spurious[spurious_of_parent[this_spurious[0]]]:

                    #To apply Will's recombine method, it should
                    #be applied here, instead of the next block

                    spurious_d_ok = np.where((dm_sgrpn-sgrpno[_jj]==0) & (dm_grpn-grpno[_jj]==0))[0]
                    tmp = dm_cood[spurious_d_ok]-cop[jj]
                    if inp!='FLARES': tmp = np.min(np.dstack(((tmp) % bounds, (-tmp) % bounds)), axis = 2)
                    d_ok = np.append(d_ok, spurious_d_ok[norm(tmp,axis=1)<=sel_dist])

                    spurious_g_ok = np.where((gp_sgrpn-sgrpno[_jj]==0) & (gp_grpn-grpno[_jj]==0))[0]
                    tmp = gp_cood[spurious_g_ok]-cop[jj]
                    if inp!='FLARES': tmp = np.min(np.dstack(((tmp) % bounds, (-tmp) % bounds)), axis = 2)
                    g_ok = np.append(g_ok, spurious_g_ok[norm(tmp,axis=1)<=sel_dist])
                    g_ok_sel = np.append(g_ok_sel, spurious_g_ok[norm(tmp,axis=1)<=sel_dist])

                    if SP:
                        spurious_s_ok = np.where((sp_sgrpn-sgrpno[_jj]==0) & (sp_grpn-grpno[_jj]==0))[0]
                        tmp = sp_cood[spurious_s_ok]-cop[jj]
                        if inp!='FLARES': tmp = np.min(np.dstack(((tmp) % bounds, (-tmp) % bounds)), axis = 2)
                        s_ok = np.append(s_ok, spurious_s_ok[norm(tmp,axis=1)<=sel_dist])
                        s_ok_sel = np.append(s_ok_sel, spurious_s_ok[norm(tmp,axis=1)<=sel_dist])

                    if BH:
                        spurious_bh_ok = np.where((bh_sgrpn-sgrpno[_jj]==0) & (bh_grpn-grpno[_jj]==0))[0]
                        tmp = bh_cood[spurious_bh_ok]-cop[jj]
                        if inp!='FLARES': tmp = np.min(np.dstack(((tmp) % bounds, (-tmp) % bounds)), axis = 2)
                        bh_ok = np.append(bh_ok, spurious_bh_ok[norm(tmp,axis=1)<=sel_dist])

                #Add here the subhalo properties that need to be
                #updated due to the spurious galaxies
                # tsfr_inst_spurious[jj] = np.sum(gp_sfr[g_ok])
                # tmstar_spurious[jj] = np.sum(mstar[spurious[spurious_of_parent[this_spurious[0]]]])
                # tSubhaloMass_spurious[jj] = np.sum(SubhaloMass[spurious[spurious_of_parent[this_spurious[0]]]])


        #stop = timeit.default_timer()

        if len(s_ok_sel) + len(g_ok_sel) >= 100:

            #print ("Calculating indices took {}s".format(np.round(stop - start,6)))
            # start = timeit.default_timer()

            #Extracting subgrid black hole properties
            if BH:
                if len(bh_ok) > 0:

                    tbh_max_index = np.argmax(bh_mass[bh_ok])
                    tbh_mass[jj] = bh_mass[bh_ok[tbh_max_index]]

                    if inp=='FLARES':
                        tbhindex[jj] = bh_ok[tbh_max_index]
                    else:
                        tbhindex[jj] = bb[bh_ok[tbh_max_index]]

            if SP:
                tsnum[kk+1] = len(s_ok)
                scum = np.cumsum(tsnum)
                sbeg = scum[kk]
                send = scum[kk+1]
                if inp=='FLARES':
                    tsindex[sbeg:send] = s_ok
                else:
                    tsindex[sbeg:send] = ss[s_ok]


            tdnum[kk+1] = len(d_ok)
            tgnum[kk+1] = len(g_ok)
            dcum = np.cumsum(tdnum)
            gcum = np.cumsum(tgnum)
            dbeg = dcum[kk]
            dend = dcum[kk+1]
            gbeg = gcum[kk]
            gend = gcum[kk+1]

            if inp=='FLARES':
                tdindex[dbeg:dend] = d_ok
                tgindex[gbeg:gend] = g_ok
            else:
                tdindex[dbeg:dend] = dd[d_ok]
                tgindex[gbeg:gend] = gg[g_ok]

            # stop = timeit.default_timer()
            # print ("Assigning arrays took {}s".format(np.round(stop - start,6)))
            gc.collect()

            kk+=1

        else:

            ind = np.append(ind, ii)

    ##End of loop ii, jj##

    del dm_sgrpn, dm_grpn, dm_cood, gp_sgrpn, gp_grpn, gp_cood, gp_sfr
    if SP: del sp_sgrpn, sp_grpn, sp_cood
    if BH: del bh_sgrpn, bh_grpn, bh_mass

    gc.collect()


    thisok = np.delete(thisok, ind.astype(int))
    tbhindex = tbhindex[thisok]
    tbh_mass = tbh_mass[thisok]

    tdtot = np.sum(tdnum)
    tstot = np.sum(tsnum)
    tgtot = np.sum(tgnum)

    tdnum = tdnum[1:len(thisok)+1]
    tsnum = tsnum[1:len(thisok)+1]
    tgnum = tgnum[1:len(thisok)+1]

    tdindex = tdindex[:tdtot]
    tsindex = tsindex[:tstot]
    tgindex = tgindex[:tgtot]

    comm.Barrier()

    gc.collect()


    if rank == 0:
        print ("Gathering data from different processes")

    indices = comm.gather(thisok, root=0)


    bhindex = comm.gather(tbhindex, root=0)
    bh_mass = comm.gather(tbh_mass, root=0)

    del thisok, tbhindex, tbh_mass
    gc.collect()

    dnum = comm.gather(tdnum, root=0)
    del tdnum
    snum = comm.gather(tsnum, root=0)
    del tsnum
    gnum = comm.gather(tgnum, root=0)
    del tgnum

    dindex = comm.gather(tdindex, root=0)
    del tdindex
    sindex = comm.gather(tsindex, root=0)
    del tsindex
    gindex = comm.gather(tgindex, root=0)
    del tgindex

    gc.collect()

    ok_centrals = 0.

    if rank == 0:

        print ("Gathering completed")

        # comm.gather returns a list of per-rank arrays; concatenate them directly
        indices = np.concatenate(indices)
        dindex = np.concatenate(dindex)
        sindex = np.concatenate(sindex)
        gindex = np.concatenate(gindex)
        bhindex = np.concatenate(bhindex)

        bh_mass = np.concatenate(bh_mass)

        dnum = np.concatenate(dnum)
        snum = np.concatenate(snum)
        gnum = np.concatenate(gnum)

        ok_centrals = grpno[indices] - 1

        cop = cop[indices]/a
        sgrpno = sgrpno[indices]
        grpno = grpno[indices]


    return ok_centrals, indices, sgrpno, grpno, cop, dnum, snum, gnum, dindex, sindex, gindex, bhindex, bh_mass
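
A hedged sketch of driving extract_info under MPI (region id and tag are hypothetical; run with e.g. mpiexec -n 8. The gathered, concatenated arrays are only populated on rank 0, so unpack them there):

from mpi4py import MPI

out = extract_info('00', '010_z005p000', inp='FLARES')
if MPI.COMM_WORLD.Get_rank() == 0:
    (ok_centrals, indices, sgrpno, grpno, cop, dnum, snum, gnum,
     dindex, sindex, gindex, bhindex, bh_mass) = out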
Code example #7
with open('selection.json', 'r') as fp:
    selection = json.load(fp)

nthr = 8
tag = '008_z007p000'

for region in np.unique(selection['region']):
    print("Region:", region)

    direc = '/cosma7/data/dp004/dc-love2/data/G-EAGLE/geagle_%04d/data' % int(
        region)

    pgas = E.read_array("SNAPSHOT",
                        direc,
                        tag,
                        "/PartType0/Coordinates",
                        numThreads=nthr,
                        noH=True,
                        physicalUnits=True)

    for i in np.where(np.array(selection['region']) == region)[0]:
        print("Galaxy:", i)
        c = np.array(selection['Coods'])[i]

        _idx = np.where((grp["%02d"%region][tag] == np.array(selection['GroupNumber'])[i]) & \
                        (sgrp["%02d"%region][tag] == np.array(selection['SubGroupNumber'])[i]))[0]

        dl = 0.02
        pmask = (pgas[:, 0] > c[0] - dl)
        pmask[pmask] = (pgas[pmask, 0] < c[0] + dl)
        pmask[pmask] = (pgas[pmask, 1] > c[1] - dl)
Code example #8
import eagle_IO.eagle_IO as Eio
import eagle as E

directory = '/cosma5/data/Eagle/ScienceRuns/Planck1/L0050N0752/PE/S15_AGNdT9/data'
tag = '003_z008p988'
N = 10

files = Eio.get_files('SUBFIND', directory, tag)
# print(files)

print("\n\nnoH = True | physicalUnits = True\n")

mstar = Eio.read_array('SUBFIND',
                       directory,
                       tag,
                       "/Subhalo/Stars/Mass",
                       numThreads=1,
                       noH=True,
                       physicalUnits=True)
print(mstar.shape, mstar[:N])

mstar = E.readArray('SUBFIND',
                    directory,
                    tag,
                    "/Subhalo/Stars/Mass",
                    numThreads=1,
                    noH=True,
                    physicalUnits=True,
                    verbose=True)
print(mstar.shape, mstar[:N])
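
A hedged follow-up to the comparison above, keeping the two reads in separate variables so they can be checked element-wise (the example itself overwrites mstar):

import numpy as np

m_new = Eio.read_array('SUBFIND', directory, tag, "/Subhalo/Stars/Mass",
                       numThreads=1, noH=True, physicalUnits=True)
m_old = E.readArray('SUBFIND', directory, tag, "/Subhalo/Stars/Mass",
                    numThreads=1, noH=True, physicalUnits=True)
print("readers agree:", np.allclose(m_new, m_old))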
Code example #9
                sel = sindex
            else:
                nok = np.where(bh_mass==0)[0]
                sel = bhindex
                location = 'Galaxy'
        else:
            tmp = 'SUBFIND'
            location = 'Galaxy'
            if 'FOF' in path:
                sel = ok_centrals
            else:
                sel = indices

        sel = np.asarray(sel, dtype=np.int64)
        try:
            out = E.read_array(tmp, sim, tag, path, noH=True, physicalUnits=True, numThreads=nThreads, CGS=eval(CGS))[sel]
        except:
            print("read_array failed")

            if 'coordinates' in path.lower():
                out = np.zeros((len(indices),3))
            elif 'velocity' in path.lower():
                out = np.zeros((len(indices),3))
            elif 'halfmassrad' in path.lower():
                out = np.zeros((len(indices),6))
            else:
                out = np.zeros(len(indices))


        if 'age' in name.lower(): out = fl.get_age(out, z, nThreads)
        if 'PartType5' in path:
Code example #10
File: flares.py Project: aswinpvijayan/flares
    def get_subgroup_part_inds(self,
                               sim,
                               snapshot,
                               part_type,
                               all_parts=False,
                               sorted=False):
        ''' A function to efficiently produce a dictionary of particle indexes from EAGLE particle data arrays
            for SUBFIND subgroups.

        :param sim:        Path to the snapshot file [str]
        :param snapshot:   Snapshot identifier [str]
        :param part_type:  The integer representing the particle type
                           (0, 1, 4, 5: gas, dark matter, stars, black hole) [int]
        :param all_parts:  Flag for whether to use all particles (SNAP group)
                           or only particles in halos (PARTDATA group)  [bool]
        :param sorted:     Flag for whether to produce indices in a sorted particle ID array
                           or unsorted (order they are stored in) [bool]
        :return:
        '''

        # Get the particle IDs for this particle type using eagle_IO
        if all_parts:

            # Get all particles in the simulation
            part_ids = E.read_array('SNAP',
                                    sim,
                                    snapshot,
                                    'PartType' + str(part_type) +
                                    '/ParticleIDs',
                                    numThreads=8)

            # Get only those particles in a halo
            group_part_ids = E.read_array('PARTDATA',
                                          sim,
                                          snapshot,
                                          'PartType' + str(part_type) +
                                          '/ParticleIDs',
                                          numThreads=8)

        else:

            # Get only those particles in a halo
            part_ids = E.read_array('PARTDATA',
                                    sim,
                                    snapshot,
                                    'PartType' + str(part_type) +
                                    '/ParticleIDs',
                                    numThreads=8)

            # A copy of this array is needed for the extraction method
            group_part_ids = np.copy(part_ids)

        # Extract the group ID and subgroup ID each particle is contained within
        grp_ids = E.read_array('PARTDATA',
                               sim,
                               snapshot,
                               'PartType' + str(part_type) + '/GroupNumber',
                               numThreads=8)
        subgrp_ids = E.read_array('PARTDATA',
                                  sim,
                                  snapshot,
                                  'PartType' + str(part_type) +
                                  '/SubGroupNumber',
                                  numThreads=8)

        # Remove particles not associated to a subgroup (subgroupnumber == 2**30 == 1073741824)
        okinds = subgrp_ids != 1073741824
        group_part_ids = group_part_ids[okinds]
        grp_ids = grp_ids[okinds]
        subgrp_ids = subgrp_ids[okinds]

        # Ensure no subgroup ID exceeds 99999
        assert subgrp_ids.max() < 99999, \
            "Found too many subgroups, need to increase subgroup format string above %05d"

        # Convert IDs to float(groupNumber.SubGroupNumber) format, i.e. group 1 subgroup 11 = 1.00011
        halo_ids = np.zeros(grp_ids.size, dtype=float)
        for (ind, g), sg in zip(enumerate(grp_ids), subgrp_ids):
            halo_ids[ind] = float(str(int(g)) + '.%05d' % int(sg))

        parts_in_groups, part_groups = self._get_part_inds(
            halo_ids, part_ids, group_part_ids, sorted)

        # Produce a dictionary containing the index of particles in each halo
        halo_part_inds = {}
        for ind, grp in zip(parts_in_groups, part_groups):
            halo_part_inds.setdefault(grp, set()).update({ind})

        # Now the dictionary is fully populated convert values from sets to arrays for indexing
        for key, val in halo_part_inds.items():
            halo_part_inds[key] = np.array(list(val))

        return halo_part_inds
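
Hedged usage sketch for the method above (path and tag are hypothetical; fl is assumed to be a flares.flares instance as in the other examples):

sim = '/path/to/GEAGLE_00/data/'   # hypothetical data directory
tag = '010_z005p000'               # hypothetical snapshot tag

star_inds = fl.get_subgroup_part_inds(sim, tag, part_type=4,
                                      all_parts=False, sorted=False)
# keys are float(GroupNumber.SubGroupNumber) IDs, values index the PARTDATA arrays
for halo_id, inds in star_inds.items():
    print(halo_id, inds.size)
    break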
Code example #11
File: flares.py Project: aswinpvijayan/flares
    def get_group_part_inds(self,
                            sim,
                            snapshot,
                            part_type,
                            all_parts=False,
                            sorted=False):
        ''' A function to efficiently produce a dictionary of particle indexes from EAGLE particle data arrays
            for SUBFIND groups.

        :param sim:        Path to the snapshot file [str]
        :param snapshot:   Snapshot identifier [str]
        :param part_type:  The integer representing the particle type
                           (0, 1, 4, 5: gas, dark matter, stars, black hole) [int]
        :param all_parts:  Flag for whether to use all particles (SNAP group)
                           or only particles in halos (PARTDATA group)  [bool]
        :param sorted:     Flag for whether to produce indices in a sorted particle ID array
                           or unsorted (order they are stored in) [bool]
        :return:
        '''

        # Get the particle IDs for this particle type using eagle_IO
        if all_parts:

            # Get all particles in the simulation
            part_ids = E.read_array('SNAP',
                                    sim,
                                    snapshot,
                                    'PartType' + str(part_type) +
                                    '/ParticleIDs',
                                    numThreads=8)

            # Get only those particles in a halo
            group_part_ids = E.read_array('PARTDATA',
                                          sim,
                                          snapshot,
                                          'PartType' + str(part_type) +
                                          '/ParticleIDs',
                                          numThreads=8)

        else:

            # Get only those particles in a halo
            part_ids = E.read_array('PARTDATA',
                                    sim,
                                    snapshot,
                                    'PartType' + str(part_type) +
                                    '/ParticleIDs',
                                    numThreads=8)

            # A copy of this array is needed for the extraction method
            group_part_ids = np.copy(part_ids)

        # Extract the group ID and subgroup ID each particle is contained within
        grp_ids = E.read_array('PARTDATA',
                               sim,
                               snapshot,
                               'PartType' + str(part_type) + '/GroupNumber',
                               numThreads=8)

        # Remove particles in unbound groups (groupnumber < 0)
        okinds = grp_ids > 0
        group_part_ids = group_part_ids[okinds]
        grp_ids = grp_ids[okinds]

        parts_in_groups, part_groups = self._get_part_inds(
            grp_ids, part_ids, group_part_ids, sorted)

        # Produce a dictionary containing the index of particles in each halo
        halo_part_inds = {}
        for ind, grp in zip(parts_in_groups, part_groups):
            halo_part_inds.setdefault(grp, set()).update({ind})

        # Now the dictionary is fully populated convert values from sets to arrays for indexing
        for key, val in halo_part_inds.items():
            halo_part_inds[key] = np.array(list(val))

        return halo_part_inds
Code example #12
df = pd.read_csv('../weights_cdf.txt')
weight_cdf = np.array(df['weights'])

out_grids = np.zeros(len(bincen))
out_cdf = np.zeros(len(bincen))

for ii, jj in enumerate(sims):

    num = str(jj)
    if len(num) == 1:
        num = '0' + num
    sim = '/cosma7/data/dp004/dc-payy1/G-EAGLE/GEAGLE_{}/data'.format(num)
    Mcrit200 = E.read_array('SUBFIND',
                            sim,
                            tag,
                            '/FOF/Group_M_Crit200',
                            numThreads=4,
                            noH=True,
                            physicalUnits=True) * 1e10

    hist, edges = np.histogram(np.log10(Mcrit200), bins)

    out_grids += hist * weight_grids[ii] / (binwidth * ((4 / 3) * np.pi *
                                                        (14 / h)**3))
    out_cdf += hist * weight_cdf[ii] / (binwidth * ((4 / 3) * np.pi *
                                                    (14 / h)**3))

fig, axs = plt.subplots(nrows=1,
                        ncols=1,
                        figsize=(10, 10),
                        sharex=True,
Code example #13
def single_sphere(reg, snap, soft, num, runall=True):

    if not runall:
        if 'gas_animationdata_reg' + reg + '_snap' + snap + '_angle%05d.npy'%num in os.listdir('animationdata/') and \
                'dm_animationdata_reg' + reg + '_snap' + snap + '_angle%05d.npy'%num in os.listdir('animationdata/'):
            return

    # Define path
    path = '/cosma/home/dp004/dc-rope1/FLARES/FLARES-1/G-EAGLE_' + reg + '/data'

    # Get centres of groups
    grp_cops = E.read_array('SUBFIND',
                            path,
                            snap,
                            'FOF/GroupCentreOfPotential',
                            noH=True,
                            numThreads=8)
    grp_ms = E.read_array('SUBFIND',
                          path,
                          snap,
                          'FOF/GroupMass',
                          noH=True,
                          numThreads=8)

    # Get the spheres centre
    centre, radius, mindist = spherical_region(path, snap)

    # Define targets
    sinds = np.argsort(grp_ms)
    grp_cops = grp_cops[sinds]
    targets = [[0, 0, 0]]
    targets.append(grp_cops[0, :] - centre)
    targets.append(grp_cops[1, :] - centre)

    # Define the box size
    lbox = (15 / 0.677) * 2

    # Define anchors dict for camera parameters
    anchors = {}
    anchors['sim_times'] = [
        0.0, 'same', 'same', 'same', 'same', 'same', 'same', 'same'
    ]
    anchors['id_frames'] = [0, 45, 188, 210, 232, 375, 420, 500]
    anchors['id_targets'] = [0, 'pass', 2, 'pass', 'pass', 'pass', 'pass', 0]
    anchors['r'] = [
        lbox * 3 / 4, 'pass', lbox / 100, 'same', 'pass', 'pass', 'pass',
        lbox * 3 / 4
    ]
    anchors['t'] = [0, 'pass', 'pass', -180, 'pass', -270, 'pass', -360]
    anchors['p'] = [0, 'pass', 'pass', 'pass', 'pass', 'pass', 'pass', 360 * 3]
    anchors['zoom'] = [
        1., 'same', 'same', 'same', 'same', 'same', 'same', 'same'
    ]
    anchors['extent'] = [
        10, 'same', 'same', 'same', 'same', 'same', 'same', 'same'
    ]

    # Define the camera trajectory
    data = camera_tools.get_camera_trajectory(targets, anchors)

    # Get images
    rgb_DM, extent = getimage(path, snap, soft, num, centre, data, part_type=1)
    rgb_gas, _ = getimage(path, snap, soft, num, centre, data, part_type=0)

    blend = Blend.Blend(rgb_DM, rgb_gas)
    rgb_output = blend.Overlay()

    fig = plt.figure(figsize=(4, 4))
    ax = fig.add_subplot(111)

    ax.imshow(rgb_output, extent=extent, origin='lower')
    ax.tick_params(axis='both',
                   left=False,
                   top=False,
                   right=False,
                   bottom=False,
                   labelleft=False,
                   labeltop=False,
                   labelright=False,
                   labelbottom=False)

    fig.savefig('plots/spheres/All/all_parts_animation_reg' + reg + '_snap' +
                snap + '_angle%05d.png' % num,
                bbox_inches='tight')
    plt.close(fig)
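
A hedged driver sketch rendering the camera trajectory above frame by frame (region id, snapshot tag and softening are hypothetical; the anchor frames run from 0 to 500):

reg, snap, soft = '00', '010_z005p000', 0.001   # hypothetical inputs
for num in range(501):
    single_sphere(reg, snap, soft, num, runall=False)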
Code example #14
import eagle_IO.eagle_IO as E
import numpy as np
import h5py
from scipy.spatial.distance import cdist

directory = '/cosma5/data/Eagle/ScienceRuns/Planck1/L0100N1504/PE/REFERENCE/data'
tag = '027_z000p101'

p_sgrpn = E.read_array('PARTDATA',
                       directory,
                       tag,
                       '/PartType4/SubGroupNumber',
                       noH=True,
                       physicalUnits=False,
                       numThreads=1)
p_grpn = E.read_array('PARTDATA',
                      directory,
                      tag,
                      '/PartType4/GroupNumber',
                      noH=True,
                      physicalUnits=False,
                      numThreads=1)
p_imass = E.read_array('PARTDATA',
                       directory,
                       tag,
                       '/PartType4/InitialMass',
                       noH=True,
                       physicalUnits=False,
                       numThreads=1) * 1e10
p_form = E.read_array('PARTDATA',
                      directory,