Example #1
def read_region_index(basedir, snapnum, region, itype, datasets):
    """
    Read in the specified data from an Eagle snapshot

    basedir: directory with the simulation data
    snapnum: which snapshot to read
    region:  coordinate range (xmin,xmax,ymin,ymax,zmin,zmax)
    itype:   particle type (integer, 0-5)
    datasets: HDF5 dataset names for the quantities to read

    This version uses the read_eagle module.
    """

    # Open the file and select the region
    snap = read_eagle.EagleSnapshot(eagle_snapshot_name(basedir, snapnum, 0))
    snap.select_region(region[0], region[1], region[2], region[3], region[4],
                       region[5])

    # Read the particles
    data = {}
    pos = snap.read_dataset(itype, "Coordinates")
    for name in datasets:
        data[name] = snap.read_dataset(itype, name)

    # Filter out any extra particles
    in_region = in_region_periodic(pos, region, snap.boxsize)
    pos = pos[in_region, ...]
    for name in datasets:
        data[name] = data[name][in_region, ...]

    return data
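# The helpers eagle_snapshot_name and in_region_periodic used above are
# external to this example. A minimal sketch of the periodic membership
# test (hypothetical, not part of read_eagle) might look like:
import numpy as np

def in_region_periodic(pos, region, boxsize):
    """Return a boolean mask for particles inside region, allowing for
    periodic wrapping across the box boundaries."""
    keep = np.ones(pos.shape[0], dtype=bool)
    for dim in range(3):
        rmin, rmax = region[2 * dim], region[2 * dim + 1]
        x = pos[:, dim]
        # test the coordinate and both of its periodic images
        keep &= (((x >= rmin) & (x <= rmax)) |
                 ((x + boxsize >= rmin) & (x + boxsize <= rmax)) |
                 ((x - boxsize >= rmin) & (x - boxsize <= rmax)))
    return keep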
Example #2
def parts_in_region(basePath, snapnum, partType, centre: 'cMpc/h', region_length: 'cMpc/h', fields):
    # region_length is the side length of the selected cube, not a radius
    #fname   = basePath+"/snapshot_0%d_z000p000/snap_0%d_z000p000.0.hdf5" % (snapnum, snapnum)
    fname   = eagle.snapshot.snapPath(basePath,snapnum)
    itype = partType
    result = {}
    # Allow a single field name to be passed as well as a list
    if isinstance(fields, str):
        fields = [fields]
    
    # Open the snapshot
    snap = read_particle.EagleSnapshot(fname)
    # Specify the region to read (coords. are in comoving Mpc/h)
    xmin = centre[0]- 0.5*region_length
    xmax = centre[0]+ 0.5*region_length
    ymin = centre[1]- 0.5*region_length
    ymax = centre[1]+ 0.5*region_length
    zmin = centre[2]- 0.5*region_length
    zmax = centre[2]+ 0.5*region_length
    snap.select_region(xmin, xmax, ymin, ymax, zmin, zmax)
    
    
    #print ("# Number of particles in this region = %d" % snap.count_particles(itype))
    # Read positions and IDs of particles of type itype in the specified region.
    for field in fields:
        result[field]= snap.read_dataset(itype, field)
    snap.close()
    return result
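# A hedged usage sketch for parts_in_region() (path and centre are illustrative):
gas = parts_in_region("/path/to/RefL0012N0188", 28, 0,
                      centre=[5.0, 5.0, 5.0], region_length=2.0,
                      fields=["Coordinates", "Mass"])
print("Read %d gas particles" % len(gas["Mass"]))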
def load(snapshot_path):
    """
    Load the redshift and the stellar feedback energy fractions from the given snapshot.
    """

    snap = read_eagle.EagleSnapshot(snapshot_path)

    # Read the whole volume (assumes a 100 cMpc/h box)
    snap.select_region(0, 100, 0, 100, 0, 100)

    eff = snap.read_dataset(4, "Feedback_EnergyFraction")
    z = snap.redshift

    return z, eff
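# A hedged usage sketch for load() above (the snapshot path is illustrative):
z, eff = load("RefL0012N0188/snapshot_028_z000p000/snap_028_z000p000.0.hdf5")
print("z = %.2f, mean feedback energy fraction = %.3f" % (z, eff.mean()))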
def extract_region(fname, sample_rate, region, datasets, output, types):
    """
    Extract the specified datasets for particles in the cuboid
    region specified by

    region[0] < x < region[1]
    region[2] < y < region[3]
    region[4] < z < region[5]

    Particles are sampled at a rate given by sample_rate.
    Output is written to the file output.
    """

    # Open the input file
    snap = read_eagle.EagleSnapshot(fname)

    # Will read whole box if region is not specified
    if region is None:
        region = (0, snap.boxsize, 0, snap.boxsize, 0, snap.boxsize)

    # Will do all particle types if not specified
    if types is None:
        types = (1, 1, 1, 1, 1, 1)

    # Get rid of extra slashes in dataset names
    if datasets is not None:
        datasets = [d.strip("/") for d in datasets]

    # Create the output file
    output = h5py.File(output, "w")

    # Select region of interest
    snap.select_region(*region)

    # Number of particles in the output
    numpart_total = zeros(6, dtype=uint64)

    # Loop over particle types to process
    for itype in range(6):

        # Check if we're doing this type
        if types[itype] != 0:

            # Get number of particles to read
            np = snap.count_particles(itype)
            if np > 0:

                # Decide which particles of this type to keep
                if sample_rate is not None:
                    ind = (random.rand(np) < sample_rate)
                    numpart_total[itype] = sum(ind)
                    print()
                    print("Particle type ", itype, ", keeping ", sum(ind),
                          " of ", len(ind), " in region")
                    print()
                else:
                    numpart_total[itype] = np
                    print()
                    print("Particle type ", itype, ", keeping all ", np,
                          " in region")
                    print()

                # May be none left after sampling!
                if numpart_total[itype] > 0:

                    # Create the output group
                    out_group = output.create_group("PartType%d" % itype)

                    # Decide which datasets to do
                    if datasets is None:
                        # If list is not supplied, do them all
                        read_datasets = snap.datasets(itype)
                    else:
                        # Do any datasets which are in the supplied list
                        read_datasets = []
                        file_datasets = [
                            d.strip("/") for d in snap.datasets(itype)
                        ]
                        for dset in datasets:
                            if dset in file_datasets:
                                read_datasets.append(dset)

                    # Loop over datasets to do
                    for dset_name in read_datasets:

                        print("  Dataset ", dset_name)

                        # May need to create intermediate groups
                        # (for element abundances etc)
                        create_output_groups(out_group, dset_name)

                        # Read this dataset
                        data = snap.read_dataset(itype, dset_name)

                        # Random sample the particles
                        if sample_rate is not None:
                            data = data[ind]

                        # Get chunk size for output
                        chunks = [s for s in data.shape]
                        chunks[0] = min((chunk_size, chunks[0]))

                        # Write the dataset
                        out_group.create_dataset(dset_name.strip("/"),
                                                 data=data,
                                                 chunks=tuple(chunks),
                                                 shuffle=True,
                                                 compression="gzip",
                                                 compression_opts=gzip_level)

    # Close the input snapshot
    snap.close()

    # Reopen input file with h5py to copy header, parameters etc
    infile = h5py.File(fname, "r")

    # Copy the header from the input.
    header = output.create_group("Header")
    for (name, val) in infile["Header"].attrs.items():
        header.attrs[name] = val

    # Update particle numbers in header
    nptot = zeros(6, dtype=uint32)
    nptot_hw = zeros(6, dtype=uint32)
    nptot_hw[:] = numpart_total >> 32
    nptot[:] = numpart_total & 0xFFFFFFFF  # low 32 bits of the total
    header.attrs["NumPart_Total"] = nptot
    header.attrs["NumPart_Total_HighWord"] = nptot_hw
    header.attrs["NumPart_ThisFile"] = nptot

    # Now only have a single file
    header.attrs["NumFilesPerSnapshot"] = 1

    # Copy other groups with run information
    for group_name in ("Config", "Constants", "Parameters",
                       "Parameters/ChemicalElements", "RuntimePars", "Units"):
        group = output.create_group(group_name)
        for (name, val) in infile[group_name].attrs.items():
            group.attrs[name] = val

    # Add the sampling rate to the header
    if sample_rate is not None:
        header.attrs["SamplingRate"] = sample_rate
    else:
        header.attrs["SamplingRate"] = 1.0

    # Add name of the original file to the header
    header.attrs["ExtractedFromSnapshot"] = fname

    # Add region spec and type flags
    header.attrs["RegionExtracted"] = asarray(region, dtype=float64)
    header.attrs["TypesExtracted"] = asarray(types, dtype=int32)

    # Close the input file
    infile.close()

    # Close the output file
    output.close()
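# create_output_groups() and the constants chunk_size and gzip_level above are
# assumed to be defined elsewhere in the script. A minimal sketch of the group
# helper (hypothetical) for dataset paths such as "ElementAbundance/Hydrogen":
def create_output_groups(out_group, dset_name):
    """Create any intermediate HDF5 groups implied by a dataset path."""
    current = out_group
    for part in dset_name.strip("/").split("/")[:-1]:
        current = current[part] if part in current else current.create_group(part)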
    def select(self, centre, region_size):  # Region size in Mpc
        if not self.quiet:
            print('Loading region...')

        code_centre = centre * self.h / (
            self.a_0 * 1e3)  # convert to h-less comoving code units
        code_region_size = region_size * self.h / self.a_0

        centre_mpc = centre / 1e3

        # Point read_eagle to the data
        snapfile = self.sim_path + 'snapshot_' + self.tag + '/snap_' + self.tag + '.0.hdf5'

        # Open snapshot
        snap = read.EagleSnapshot(snapfile)
        # Select region of interest
        snap.select_region(code_centre[0] - code_region_size / 2.,
                           code_centre[0] + code_region_size / 2.,
                           code_centre[1] - code_region_size / 2.,
                           code_centre[1] + code_region_size / 2.,
                           code_centre[2] - code_region_size / 2.,
                           code_centre[2] + code_region_size / 2.)

        if self.property == 'stars':
            pos = snap.read_dataset(4, 'Coordinates') * self.a_0 / self.h
            smoothing_length = snap.read_dataset(
                4, 'SmoothingLength') * self.a_0 / self.h
            quantity = snap.read_dataset(4, 'Mass') / self.h * 1e10

        else:
            pos = snap.read_dataset(0, 'Coordinates') * self.a_0 / self.h
            smoothing_length = snap.read_dataset(
                0, 'SmoothingLength') * self.a_0 / self.h

            if self.property == 'gas':
                quantity = snap.read_dataset(0, 'Mass') / self.h / 1e10

            elif self.property == 'xrays':
                pids = snap.read_dataset(0, 'ParticleIDs')
                quantity = self.xrays[np.searchsorted(self.xray_pids, pids)]

            elif self.property == 'entropy':
                m_H_cgs = 1.6737e-24
                weight = snap.read_dataset(0, 'Mass') / self.h / 1e10

                temp = snap.read_dataset(0, 'Temperature')

                abunds = np.zeros((len(temp), 11))
                abunds[:, 0] = snap.read_dataset(
                    0, "SmoothedElementAbundance/Hydrogen")
                abunds[:, 1] = snap.read_dataset(
                    0, "SmoothedElementAbundance/Helium")
                abunds[:, 2] = snap.read_dataset(
                    0, "SmoothedElementAbundance/Carbon")
                abunds[:, 3] = snap.read_dataset(
                    0, "SmoothedElementAbundance/Nitrogen")
                abunds[:, 4] = snap.read_dataset(
                    0, "SmoothedElementAbundance/Oxygen")
                abunds[:, 5] = snap.read_dataset(
                    0, "SmoothedElementAbundance/Neon")
                abunds[:, 6] = snap.read_dataset(
                    0, "SmoothedElementAbundance/Magnesium")
                abunds[:, 7] = snap.read_dataset(
                    0, "SmoothedElementAbundance/Silicon")
                abunds[:, 8] = abunds[:, 7] * 0.6054160
                abunds[:, 9] = abunds[:, 7] * 0.0941736
                abunds[:, 10] = snap.read_dataset(
                    0, "SmoothedElementAbundance/Iron")

                atomic_numbers = np.array(
                    [1., 2., 6., 7., 8., 10., 12., 14., 16., 20., 26.])
                Xe = np.ones(len(abunds[:, 0]))
                num_ratios = np.zeros(np.shape(abunds))
                for col in range(len(abunds[0, :])):
                    num_ratios[:, col] = abunds[:, col] / abunds[:, 0]
                for element in range(len(abunds[0, :]) - 1):
                    Xe += num_ratios[:, element + 1] * atomic_numbers[element + 1]

                density = snap.read_dataset(0, 'Density')
                n_H = density * abunds[:, 0] / m_H_cgs  # convert into nH cm^-3
                n_e = n_H * Xe  # electron density in cm^-3

                quantity = temp / np.power(n_e, 2. / 3.)

            else:
                raise IOError(
                    'Plot options are "gas", "entropy", "stars" or "xrays"')

        if not self.quiet:
            print('Wrapping box...')
        pos = ne.evaluate("pos-centre_mpc")
        pos[pos[:, 0] < (-1. * self.boxsize / 2.), 0] += self.boxsize
        pos[pos[:, 1] < (-1. * self.boxsize / 2.), 1] += self.boxsize
        pos[pos[:, 2] < (-1. * self.boxsize / 2.), 2] += self.boxsize
        pos[pos[:, 0] > self.boxsize / 2., 0] -= self.boxsize
        pos[pos[:, 1] > self.boxsize / 2., 1] -= self.boxsize
        pos[pos[:, 2] > self.boxsize / 2., 2] -= self.boxsize
        pos = ne.evaluate("pos+centre_mpc")

        # read_eagle loads in more than we actually asked for above. We need to mask to the region size again!
        posmask = np.where(
            (np.absolute(pos[:, 0] - centre_mpc[0]) < region_size / 2.)
            & (np.absolute(pos[:, 1] - centre_mpc[1]) < region_size / 2.)
            & (np.absolute(pos[:, 2] - centre_mpc[2]) < region_size / 2.))[0]

        pos = pos[posmask, :]
        smoothing_length = smoothing_length[posmask]
        quantity = quantity[posmask]

        N = len(quantity)

        pos *= 1e3  # convert to kpc
        smoothing_length *= 1e3
        if not self.quiet:
            print('Creating scene...')

        if self.property == 'entropy':
            weight = weight[posmask]
            Particles = sphviewer.Particles(pos, weight, hsml=smoothing_length)
            self.weightScene = sphviewer.Scene(Particles)
            Particles = sphviewer.Particles(pos,
                                            quantity * weight,
                                            hsml=smoothing_length)
            self.propScene = sphviewer.Scene(Particles)

        else:

            if pos.size == 0:
                self.Scene = None

            else:
                Particles = sphviewer.Particles(pos,
                                                quantity,
                                                hsml=smoothing_length)
                self.Scene = sphviewer.Scene(Particles)
Example #6
basedir = "/gpfs/data/jch/tmp/"

# The snapshot to read is identified by specifying the name of one of the snapshot files
fname = basedir + "/RefL0012N0188/snapshot_028_z000p000/snap_028_z000p000.0.hdf5"

#
# Particle type to read. Particle types are:
#   0 = Gas
#   1 = Dark matter
#   4 = Stars
#   5 = Black holes
#
itype = 0

# Open the snapshot
snap = read_eagle.EagleSnapshot(fname)

print("# Box size = %16.8e Mpc/h" % snap.boxsize)
print("#")
print("# Total number of gas  particles in snapshot = %d" %
      snap.numpart_total[0])
print("# Total number of DM   particles in snapshot = %d" %
      snap.numpart_total[1])
print("# Total number of star particles in snapshot = %d" %
      snap.numpart_total[4])
print("# Total number of BH   particles in snapshot = %d" %
      snap.numpart_total[5])

# Specify the region to read (coords. are in comoving Mpc/h)
xmin, xmax = 0.0, 2.0
ymin, ymax = 0.0, 2.0
zmin, zmax = 0.0, 2.0
snap.select_region(xmin, xmax, ymin, ymax, zmin, zmax)
    def select(self,centre,region_size): # Region size in Mpc
        
        print('Loading region...')
        code_centre = centre * self.h/(self.a_0*1e3) # convert to h-less comoving code units
        region_size *= self.h/self.a_0

        # Point read_eagle to the data
        snapfile = self.sim_path + 'snapshot_' + self.tag + '/snap_' + self.tag + '.0.hdf5'

        # Open snapshot
        snap = read.EagleSnapshot(snapfile)
        # Select region of interest
        snap.select_region(code_centre[0]-region_size/2.,
                            code_centre[0]+region_size/2.,
                            code_centre[1]-region_size/2.,
                            code_centre[1]+region_size/2.,
                            code_centre[2]-region_size/2.,
                            code_centre[2]+region_size/2.)

        if self.property == 'stars':
            pos = snap.read_dataset(4,'Coordinates') * self.a_0/self.h
            smoothing_length = snap.read_dataset(4, 'SmoothingLength') * self.a_0 / self.h
            quantity = snap.read_dataset(4, 'Mass') / self.h * 1e10
        else:
            pos = snap.read_dataset(0, 'Coordinates') * self.a_0 / self.h
            smoothing_length = snap.read_dataset(0, 'SmoothingLength') * self.a_0 / self.h

            if self.property == 'gas':
                quantity = snap.read_dataset(0, 'Mass') / self.h / 1e10
            elif self.property == 'xrays':
                pids = snap.read_dataset(0, 'ParticleIDs')
                quantity = self.xrays[np.searchsorted(self.xray_pids,pids)]
            else:
                raise IOError('Plot options are "gas","stars" or "xrays"')

        
        print('Wrapping box...')
        region_size /= self.h/self.a_0
        centre_mpc = centre /1e3
        pos = ne.evaluate("pos-centre_mpc")
        pos[pos[:,0]<(-1.*self.boxsize/2.),0] += self.boxsize
        pos[pos[:,1]<(-1.*self.boxsize/2.),1] += self.boxsize
        pos[pos[:,2]<(-1.*self.boxsize/2.),2] += self.boxsize
        pos[pos[:,0]>self.boxsize/2.,0] -= self.boxsize
        pos[pos[:,1]>self.boxsize/2.,1] -= self.boxsize
        pos[pos[:,2]>self.boxsize/2.,2] -= self.boxsize
        pos = ne.evaluate("pos+centre_mpc")
        



        pos = pos.T

        N = len(quantity)

        pos *= 1e3  # convert to kpc
        smoothing_length *= 1e3

        print('Creating scene...')
        Particles = sphviewer.Particles(pos, quantity, hsml=smoothing_length)
        self.Scene = sphviewer.Scene(Particles)
    def __init__(self, prop,
                 sim='L0100N1504',
                 run='REFERENCE',
                 snapnum=28):
        print('Initialising box for imaging...')

        tag = snapdict[str(snapnum)][0]

        sim_path = '/data5/simulations/EAGLE/' + sim + '/' + run + '/data/'

        # Get volume information
        boxsize = E.readAttribute('SNAP', sim_path, tag, "/Header/BoxSize")
        h = E.readAttribute('SNAP', sim_path, tag, "/Header/HubbleParam")
        a_0 = E.readAttribute('SNAP', sim_path, tag, "/Header/ExpansionFactor")

        # Point read_eagle to the data
        snapfile = sim_path + 'snapshot_' + tag + '/snap_' + tag + '.0.hdf5'
        comm = MPI.COMM_WORLD
        comm_rank = comm.Get_rank()
        comm_size = comm.Get_size()
        # Open snapshot
        snap = read.EagleSnapshot(snapfile)
        # Select region of interest
        snap.select_region(0.,boxsize,0.,boxsize,0.,boxsize)
        # Split selection between processors
        # This assigns an equal number of hash cells to each processor.
        snap.split_selection(comm_rank,comm_size)

        if prop == 'stars':
            #pos = load_array('Coordinates', 4, sim=sim, run=run, tag=tag).T
            #smoothing_length = load_array('SmoothingLength', 4, sim=sim, run=run, tag=tag)
            #quantity = load_array('Mass', 4, sim=sim, run=run, tag=tag) * 1e10

            pos = snap.read_dataset(4,'Coordinates') * a_0/h
            pos = pos.T
            smoothing_length = snap.read_dataset(4, 'SmoothingLength') * a_0 / h
            quantity = snap.read_dataset(4, 'Mass') / h * 1e10
        else:
            #pos = load_array('Coordinates', 0, sim=sim, run=run, tag=tag).T
            #smoothing_length = load_array('SmoothingLength', 0, sim=sim, run=run, tag=tag)

            pos = snap.read_dataset(0, 'Coordinates') * a_0 / h
            print(pos)
            pos = pos.T
            smoothing_length = snap.read_dataset(0, 'SmoothingLength') * a_0 / h

            if prop == 'gas':
                quantity = snap.read_dataset(0, 'Mass') / h / 1e10
                print(quantity)
            elif prop == 'xrays':
                pids = snap.read_dataset(0, 'ParticleIDs')
                print('Matching x-rays to particles')

                xray_data = h5.File('/data6/arijdav1/Lx_matching/'+sim+'_'+run+'/'+tag+'.hdf5','r')
                xrays = np.array(xray_data['Xray_luminosity']) / 1e30
                xray_pids = np.array(xray_data['ParticleIDs'])
                #match_sort = np.argsort(xray_pids)
                #xrays = xrays[match_sort]
                #xray_pids = xray_pids[match_sort]

                quantity = xrays[np.searchsorted(xray_pids,pids)]


            else:
                raise IOError('Plot options are "gas","stars" or "xrays"')

        N = len(quantity)

        pos *= 1e3  # convert to kpc
        smoothing_length *= 1e3

        print(N)

        Particles = sphviewer.Particles(pos, quantity, hsml=smoothing_length)

        print(Particles.get_pos())
        print(Particles.get_mass())
        print(Particles.get_hsml())

        self.Scene = sphviewer.Scene(Particles)

        self.sim = sim
        self.run = run
        self.tag = tag
        self.property = prop
        self.boxsize = boxsize / h
Example #9
galaxy = sys.argv[3]

z_padded = "{:07.3f}".format(float(z))
a = z_padded.split('.')
snap_z = 'z'+a[0]+'p'+a[1]
fname = 'snap_'+"{:03d}".format(int(snap_num))+'_'+snap_z+'.0.hdf5'
dirname = 'snapshot_'+"{:03d}".format(int(snap_num))+'_'+snap_z

IDs = pd.read_csv('/orange/narayanan/s.lower/eagle/filtered_snapshots/galaxy_lists/snap'+snap_num+'_halo_galaxy.csv')
group = IDs['GroupNumber'][int(galaxy)]
subgroup = IDs['SubGroupNumber'][int(galaxy)]

print('Read in galaxy ID')

snap_dir = '/orange/narayanan/s.lower/eagle/m50n752/snapshots/RefL0050N0752/'+dirname
snap = read_eagle.EagleSnapshot(snap_dir+'/'+fname)

snap.select_region(0, 50. * 0.6777, 0, 50. * 0.6777, 0, 50. * 0.6777)
snap.split_selection(comm_rank, comm_size)
print('Selected region')
outdir = '/orange/narayanan/s.lower/eagle/filtered_snapshots/snap023/'
output_file = h5py.File(outdir+'/galaxy_'+str(galaxy)+'.hdf5', 'w')
#output_file = h5py.File('/orange/narayanan/s.lower/eagle/filtered_snapshots/snap'+snap_num+'/galaxy_'+str(galaxy)+'.hdf5', 'w')
input1 = h5py.File(snap_dir+'/'+fname, 'r')
#output_file.copy(input1['Header'], 'Header')

attrs = ['Config',
 'Constants',
 'HashTable',
 'Header',
 'Parameters',
import read_eagle
from mpi4py import MPI
import matplotlib.pyplot as plt
import numpy as np

comm = MPI.COMM_WORLD
comm_rank = comm.Get_rank()
comm_size = comm.Get_size()

# Name of one file from the snapshot
snapfile = "/data5/simulations/EAGLE/L0100N1504/REFERENCE/data/snapshot_028_z000p000/snap_028_z000p000.0.hdf5"

print('Opening snapshot')
# Open snapshot
snap = read_eagle.EagleSnapshot(snapfile)

# Select region of interest
snap.select_region(0, 10, 0, 10, 0, 10)

print('Splitting process')
# Split selection between processors
# This assigns an equal number of hash cells to each processor.
snap.split_selection(comm_rank, comm_size)

print('Reading data')
# Read data - each processor will receive a part of the selected region
density = snap.read_dataset(0, "Density")
temperature = snap.read_dataset(0, "Temperature")
pos = snap.read_dataset(0, "Coordinates")
ids = snap.read_dataset(0, "ParticleIDs")
Example #11
# Line arguments
#############
path_to_snapshots = sys.argv[1]
galaxy_list_path = sys.argv[2]
output_path = sys.argv[3]
galaxy_number = sys.argv[4]
####################

IDs = pd.read_csv(galaxy_list_path)
group = IDs['GroupNumber'][int(galaxy_number)]
subgroup = IDs['SubGroupNumber'][int(galaxy_number)]
print('Read in galaxy ID')

snap_dir = path_to_snapshots
# glob returns a list; EagleSnapshot needs a single filename
snapshot_0 = glob.glob(snap_dir + '/snap_*_z*.0.hdf5')[0]
snap = read_eagle.EagleSnapshot(snapshot_0)

#find box size
try:
    start = snap_dir.find('RefL') + 4
    end = snap_dir.find('N', start)
    box_size = float(snap_dir[start:end])
except ValueError:
    print('Need to input boxsize')
    sys.exit()

snap.select_region(0, box_size * 0.6777, 0, box_size * 0.6777, 0,
                   box_size * 0.6777)
snap.split_selection(comm_rank, comm_size)
print('Selected region')
output_file = h5py.File(output_path + '/galaxy_' + str(galaxy_number) + '.hdf5', 'w')
Example #12
def gen_particle_history_serial(base_halo_data, snaps=None):
    """

    gen_particle_history_serial : function
    ----------

    Generate and save particle history data from VELOCIraptor property and particle files.

    Parameters
    ----------
    base_halo_data : list of dictionaries
        The halo data list of dictionaries previously generated (by gen_base_halo_data). Should contain
        information regarding the type of particle file being read.

    snaps : list of ints
        The list of absolute snaps (corresponding to indices in base_halo_data) for which we will add
        particles in halos or subhalos (and save accordingly). The running lists will build on the previous snap.

    Returns
    ----------
    None.

    Saves to file:
    PartHistory_xxx-outname.hdf5 : hdf5 file with datasets

        '/PartTypeX/ParticleIDs' - SORTED particle IDs from simulation.
        '/PartTypeX/ParticleIndex' - Corresponding indices of particles.
        '/PartTypeX/HostStructure' - Host structure (from STF) of particles. (-1: no host structure)

    """

    # If not given snaps, do for all snaps in base_halo_data (can deal with padded snaps)
    if snaps is None:
        snaps = list(range(len(base_halo_data)))

    # Find which snaps are valid/not padded and which aren't
    try:
        valid_snaps = [len(base_halo_data[snap].keys()) > 3
                       for snap in snaps]  #which indices of snaps are valid
        valid_snaps = np.compress(valid_snaps, snaps)
        run_outname = base_halo_data[valid_snaps[0]]['outname']

    except (IndexError, KeyError):
        print("Couldn't validate snaps")
        return []

    # Standard names of particle types
    PartNames = ['Gas', 'DM', '', '', 'Star', 'BH']

    # Which simulation type do we have?
    if base_halo_data[valid_snaps[0]]['Part_FileType'] == 'EAGLE':
        PartTypes = [0, 1, 4, 5]  #Gas, DM, Stars, BH
        SimType = 'EAGLE'
    else:
        PartTypes = [0, 1]  #Gas, DM
        SimType = 'OtherHydro'

    # If the directory with particle histories doesn't exist yet, make it (where we have run the python script)
    if not os.path.isdir("part_histories"):
        os.mkdir("part_histories")

    # Iterate through snapshots and flip switches as required
    isnap = 0
    for snap in valid_snaps:
        # Initialise output file (will be truncated if already exists)
        outfile_name = "part_histories/PartHistory_" + str(snap).zfill(
            3) + "_" + run_outname + ".hdf5"
        if os.path.exists(outfile_name):
            os.remove(outfile_name)
        outfile = h5py.File(outfile_name, 'w')

        # Load the EAGLE data for this snapshot
        if SimType == 'EAGLE':
            t1 = time.time()
            EAGLE_boxsize = base_halo_data[snap]['SimulationInfo'][
                'BoxSize_Comoving']
            EAGLE_Snap = read_eagle.EagleSnapshot(
                base_halo_data[snap]['Part_FilePath'])
            EAGLE_Snap.select_region(xmin=0,
                                     xmax=EAGLE_boxsize,
                                     ymin=0,
                                     ymax=EAGLE_boxsize,
                                     zmin=0,
                                     zmax=EAGLE_boxsize)
            t2 = time.time()
            print(
                f"Loaded and sliced EAGLE data from snapshot {snap} in {t2-t1:.2f} sec"
            )

        # Load the halo particle lists for this snapshot for each particle type
        t1 = time.time()
        snap_fof_particle_data = get_FOF_particle_lists(
            base_halo_data, snap
        )  #don't need to add subhalo particles as we have each subhalo separately
        if not isinstance(snap_fof_particle_data, dict):
            print(
                f'Skipping histories for snap {snap} - could not retrieve FOF particle lists'
            )
            continue

        # Count halos and particles in each
        n_halos = len(list(snap_fof_particle_data["Particle_IDs"].keys()))
        n_part_ihalo = [
            len(snap_fof_particle_data["Particle_IDs"][str(ihalo)])
            for ihalo in range(n_halos)
        ]
        n_part_tot = np.sum(n_part_ihalo)

        # Store IDs, Types, and assign host IDs
        structure_Particles = {}
        structure_Particles['ParticleIDs'] = np.concatenate([
            snap_fof_particle_data['Particle_IDs'][str(ihalo)]
            for ihalo in range(n_halos)
        ])
        structure_Particles['ParticleTypes'] = np.concatenate([
            snap_fof_particle_data['Particle_Types'][str(ihalo)]
            for ihalo in range(n_halos)
        ])
        del snap_fof_particle_data  #remove the unnecessary fof data to save memory
        structure_Particles['HostStructureID'] = np.concatenate([
            np.ones(n_part_ihalo[ihalo], dtype='int64') * haloid
            for ihalo, haloid in enumerate(base_halo_data[snap]['ID'])
        ])

        t2 = time.time()
        print(
            f"Loaded, concatenated and sorted halo particle lists for snap {snap} in {t2-t1:.2f} sec"
        )

        # Map IDs to indices from particle data, and initialise array
        Particle_History_Flags = dict()
        for itype in PartTypes:
            ###############################################
            ##### Step 1: PARTICLE DATA - SORTING IDs #####
            ###############################################

            # Load new snap data
            if SimType == 'EAGLE':
                try:
                    Particle_IDs_Unsorted_itype = EAGLE_Snap.read_dataset(
                        itype, "ParticleIDs")
                except:
                    print(
                        f'No {PartNames[itype]} PartType{itype} particles found at snap {snap} - skipping to next particle type'
                    )
                    continue
            else:
                h5py_Snap = h5py.File(base_halo_data[snap]['Part_FilePath'], 'r')
                Particle_IDs_Unsorted_itype = h5py_Snap['PartType' +
                                                        str(itype) +
                                                        '/ParticleIDs']
            N_Particles_itype = len(Particle_IDs_Unsorted_itype)
            print(
                f'There are n = {N_Particles_itype} PartType{itype} particles loaded for snap {snap}'
            )

            # Sort IDs and initialise hdf5 file with mapped IDs
            print(
                f"Mapping IDs to indices for PartType{itype} particles at snap {snap} ..."
            )
            itype_IDs_argsort = np.argsort(Particle_IDs_Unsorted_itype)
            itype_IDs_sorted = Particle_IDs_Unsorted_itype[(
                itype_IDs_argsort, )]
            del Particle_IDs_Unsorted_itype

            # Dump sorted IDs and particle argsort to hdf5
            outfile.create_dataset(f'/PartType{itype}/ParticleIDs',
                                   dtype=np.int64,
                                   compression='gzip',
                                   data=itype_IDs_sorted)
            outfile.create_dataset(f'/PartType{itype}/ParticleIndex',
                                   dtype=np.int32,
                                   compression='gzip',
                                   data=itype_IDs_argsort)
            del itype_IDs_argsort
            outfile[f'/PartType{itype}/ParticleIDs'].attrs.create(
                'npart', data=N_Particles_itype, dtype=np.int64)

            ###############################################
            ##### Step 2: FOF DATA - ADDING HOSTS IDs #####
            ###############################################
            t1_fof = time.time()
            # Initialise hosts to -1
            itype_hostIDs = np.full(N_Particles_itype, -1, dtype='int64')
            # Find which structure particles are this type
            itype_structure_mask = np.where(
                structure_Particles["ParticleTypes"] == itype)
            # Find the index of the structure particles in the sorted particle IDs list of this type
            itype_structure_partindex = binary_search(
                sorted_list=itype_IDs_sorted,
                items=structure_Particles["ParticleIDs"][itype_structure_mask])
            del itype_IDs_sorted
            # Add host ID for structure particles
            itype_hostIDs[(
                itype_structure_partindex,
            )] = structure_Particles['HostStructureID'][itype_structure_mask]

            t2_fof = time.time()
            print(
                f"Added host halos in {t2_fof-t1_fof:.2f} sec for PartType{itype} particles"
            )

            # Dump structure IDs and particle argsort to hdf5
            outfile.create_dataset(f'/PartType{itype}/HostStructure',
                                   dtype=np.int64,
                                   compression='gzip',
                                   data=itype_hostIDs)

        outfile.close()
        isnap += 1  #go to next snap

    return None  #Don't return anything just save the data
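# binary_search() used above is an external helper. A minimal sketch using
# np.searchsorted (assuming every item is present in sorted_list) could be:
def binary_search(sorted_list, items):
    """Return the indices at which items appear in the sorted array."""
    return np.searchsorted(sorted_list, items)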
def load_region(filename,
                region,
                particle_type,
                subsample,
                property="Coordinates"):
    """
    Loads a region (respecting periodic boundary conditions!)
    """

    with h5py.File(filename, "r") as handle:
        boxsize = handle["Header"].attrs["BoxSize"]
        redshift = handle["Header"].attrs["Redshift"]

    if periodic_in_more_than_one_dimension(region, boxsize):
        raise AttributeError(
            "Unable to process periodicity in more than one direction (load_region)"
        )

    snap = read_eagle.EagleSnapshot(filename)

    flat_region = np.array(region).flatten()

    try:
        # First load the particles that we 'know' to be in the region, ignoring
        # periodicity
        snap.select_region(*flat_region)
        read_data = snap.read_dataset(particle_type, property)

        # Now we need to deal with periodicity. First, find the periodic axis.

        for dimension, side in enumerate(region):
            if side[0] < 0.0:
                # Must be periodic on the 'left' side, here we wrap the box
                # around to the 'right' side
                new_region = flat_region.copy()
                new_region[2 * dimension] = boxsize + side[0]
                new_region[2 * dimension + 1] = boxsize
            elif side[1] > boxsize:
                # The periodic side is on the 'right' and we need to wrap back
                # around to the 'left'
                new_region = flat_region.copy()
                new_region[2 * dimension] = 0.0
                new_region[2 * dimension + 1] = side[1] - boxsize
            else:
                continue

            # Note that this can only, by definition above, happen once; no need to
            # worry about this happening multiple times and reading from file again
            snap.select_region(*new_region)
            data_new = snap.read_dataset(particle_type, property)

            # If our data is co-ordinates, we need to shift the periodic axis
            # of the new data back to the original co-ordinate system.
            if property == "Coordinates":
                if side[0] < 0.0:
                    data_new[:, dimension] = data_new[:, dimension] - boxsize
                elif side[1] > boxsize:
                    data_new[:, dimension] = data_new[:, dimension] + boxsize

            # We _must_ break otherwise we'll keep adding or taking off the boxsize
            # (bad idea).

            read_data = np.concatenate([read_data, data_new])

            break

    except KeyError:
        read_data = None

    # Guard against the failed-read case before subsampling
    if read_data is None:
        return None, boxsize, redshift

    return read_data[::subsample], boxsize, redshift
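# A hedged usage sketch for load_region() (path illustrative; the helper
# periodic_in_more_than_one_dimension is assumed to exist elsewhere):
region = [[-5.0, 5.0], [0.0, 10.0], [0.0, 10.0]]
coords, boxsize, redshift = load_region("snap_028_z000p000.0.hdf5",
                                        region, 0, subsample=10)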
Example #14
def extract(record):

    # ---- get the particle data

    # initialise star and gas dictionaries
    sdat = {}
    gdat = {}
    yngstars = {}
    hiiregions = {}

    # open snapshot and read relevant field attributes
    sfn = snapfilename(record["eaglesim"], record["snaptag"])
    snapshot = read_eagle.EagleSnapshot(sfn)
    params = fieldAttrs(sfn, "Header")
    params.update(fieldAttrs(sfn, "Constants"))
    params.update(fieldAttrs(sfn, "RuntimePars"))
    hubbleparam = params["HubbleParam"]
    expansionfactor = params["ExpansionFactor"]
    schmidtparams = schmidtParameters(params)

    # convert center of potential to snapshot units
    copx = record["copx"] * hubbleparam
    copy = record["copy"] * hubbleparam
    copz = record["copz"] * hubbleparam

    # specify (2*250kpc)^3 physical volume about galaxy centre
    delta = 0.25 * hubbleparam / expansionfactor
    snapshot.select_region(copx - delta, copx + delta, copy - delta,
                           copy + delta, copz - delta, copz + delta)

    # read star particle information
    insubhalo = (snapshot.read_dataset(4, "GroupNumber") == record["groupnr"]) & \
                (snapshot.read_dataset(4, "SubGroupNumber") == record["subgroupnr"])
    sdat['r'] = snapshot.read_dataset(4, "Coordinates")[insubhalo]
    sdat['h'] = snapshot.read_dataset(4, "SmoothingLength")[insubhalo]
    sdat['im'] = snapshot.read_dataset(4, "InitialMass")[insubhalo]
    sdat['m'] = snapshot.read_dataset(4, "Mass")[insubhalo]
    sdat['v'] = snapshot.read_dataset(4, "Velocity")[insubhalo]
    sdat['Z'] = snapshot.read_dataset(4, "SmoothedMetallicity")[insubhalo]
    sdat['born'] = snapshot.read_dataset(4, "StellarFormationTime")[insubhalo]
    sdat['rho_born'] = snapshot.read_dataset(4, "BirthDensity")[insubhalo]

    # read gas particle information
    insubhalo = (snapshot.read_dataset(0, "GroupNumber") == record["groupnr"]) & \
                (snapshot.read_dataset(0, "SubGroupNumber") == record["subgroupnr"])
    gdat['r'] = snapshot.read_dataset(0, "Coordinates")[insubhalo]
    gdat['h'] = snapshot.read_dataset(0, "SmoothingLength")[insubhalo]
    gdat['m'] = snapshot.read_dataset(0, "Mass")[insubhalo]
    gdat['v'] = snapshot.read_dataset(0, "Velocity")[insubhalo]
    gdat['Z'] = snapshot.read_dataset(0, "SmoothedMetallicity")[insubhalo]
    gdat['T'] = snapshot.read_dataset(0, "Temperature")[insubhalo]
    gdat['rho'] = snapshot.read_dataset(0, "Density")[insubhalo]
    gdat['sfr'] = snapshot.read_dataset(0, "StarFormationRate")[insubhalo]

    # convert units
    sdat['r'] = periodicCorrec(sdat['r'], params["BoxSize"])
    sdat['r'] = toparsec(sdat['r'], hubbleparam, expansionfactor)
    sdat['h'] = toparsec(sdat['h'], hubbleparam, expansionfactor)
    sdat['im'] = tosolar(sdat['im'], hubbleparam)
    sdat['m'] = tosolar(sdat['m'], hubbleparam)
    sdat['t'] = age(sdat['born']) - age(expansionfactor)
    sdat['rho_born'] *= 6.7699e-31
    gdat['r'] = periodicCorrec(gdat['r'], params["BoxSize"])
    gdat['r'] = toparsec(gdat['r'], hubbleparam, expansionfactor)
    gdat['h'] = toparsec(gdat['h'], hubbleparam, expansionfactor)
    gdat['m'] = tosolar(gdat['m'], hubbleparam)
    gdat['rho'] = togcm3(gdat['rho'], hubbleparam, expansionfactor)

    # remember density conversion from g cm^-3 to M_sun Mpc^-3
    densconv = ((params['CM_PER_MPC'] / 1.e6)**3) / params['SOLAR_MASS']

    # calculate the ISM pressure
    sdat['P'] = getPtot(sdat['rho_born'], schmidtparams)
    gdat['P'] = getPtot(gdat['rho'], schmidtparams)

    # calculate stellar center of mass and translational velocity using shrinking aperture technique
    com, v_bar = shrinkingCentroid(sdat['r'], sdat['m'], sdat['v'])

    # find unit rotation axis vector, using only stellar information and an aperture of 30 kpc
    n_rot = rotAxis(sdat['r'],
                    sdat['v'],
                    sdat['m'],
                    com,
                    v_bar,
                    apt=30e3,
                    aptfrac=0.08)

    # translate to center of mass and line up with angular momentum vector
    transf = Transform()
    transf.translate(-com[0], -com[1], -com[2])
    a, b, c = n_rot
    v = np.sqrt(b * b + c * c)
    if v > 0.3:
        transf.rotateX(c / v, -b / v)
        transf.rotateY(v, -a)
    else:
        v = np.sqrt(a * a + c * c)
        transf.rotateY(c / v, -a / v)
        transf.rotateX(v, -b)
    sdat['r'], w = transf.transform_vec(sdat['r'][:, 0], sdat['r'][:, 1],
                                        sdat['r'][:, 2],
                                        np.ones(sdat['r'].shape[0]))
    gdat['r'], w = transf.transform_vec(gdat['r'][:, 0], gdat['r'][:, 1],
                                        gdat['r'][:, 2],
                                        np.ones(gdat['r'].shape[0]))

    # apply 30kpc aperture (i.e. remove all particles outside the aperture)
    applyAperture(sdat, 30e3)
    applyAperture(gdat, 30e3)

    # ---- gather statistics about the data as read from the snapshot

    # information identifying the SKIRT-run record and the galaxy
    info = {}
    info["skirt_run_id"] = record["runid"]
    info["galaxy_id"] = record["galaxyid"]

    # information about the particles
    info["original_particles_stars"] = len(sdat['m'])
    info["original_initial_mass_stars"] = sdat['im'].sum()
    info["original_mass_stars"] = sdat['m'].sum()
    info["original_particles_gas"] = len(gdat['m'])
    info["original_mass_gas"] = gdat['m'].sum()
    info["original_mass_baryons"] = info["original_mass_stars"] + info[
        "original_mass_gas"]

    # information about the direction of the stellar angular momentum axis
    info["original_rotation_axis_x"] = n_rot[0]
    info["original_rotation_axis_y"] = n_rot[1]
    info["original_rotation_axis_z"] = n_rot[2]

    # ---- initialize statistics about the exported data

    info["exported_particles_old_stars"] = 0
    info["exported_initial_mass_old_stars"] = 0
    info["exported_mass_old_stars"] = 0

    info["exported_particles_non_star_forming_gas"] = 0
    info["exported_mass_non_star_forming_gas"] = 0

    info["exported_particles_young_stars_from_stars"] = 0
    info["exported_initial_mass_young_stars_from_stars"] = 0
    info["exported_mass_young_stars_from_stars"] = 0

    info["exported_particles_hii_regions_from_stars"] = 0
    info["exported_initial_mass_hii_regions_from_stars"] = 0
    info["exported_mass_hii_regions_from_stars"] = 0

    info["exported_particles_unspent_gas_from_stars"] = 0
    info["exported_mass_unspent_gas_from_stars"] = 0

    info["exported_particles_young_stars_from_gas"] = 0
    info["exported_initial_mass_young_stars_from_gas"] = 0
    info["exported_mass_young_stars_from_gas"] = 0

    info["exported_particles_hii_regions_from_gas"] = 0
    info["exported_initial_mass_hii_regions_from_gas"] = 0
    info["exported_mass_hii_regions_from_gas"] = 0

    info["exported_particles_negative_gas_from_stars"] = 0
    info["exported_particles_negative_gas_from_gas"] = 0
    info["exported_mass_negative_gas_from_stars"] = 0
    info["exported_mass_negative_gas_from_gas"] = 0

    info["exported_particles_unspent_gas_from_gas"] = 0
    info["exported_mass_unspent_gas_from_gas"] = 0

    # ---- resample star forming regions

    # set the "standard" constant covering fraction (see Camps+ 2016)
    f_PDR = 0.1

    # seed the random generator so that a consistent pseudo-random sequence is used for each particular galaxy
    np.random.seed(int(record["galaxyid"]))

    # define HII region age constants (in years)
    young_age = 1e8  # 100 Myr  --> particles below this age are resampled
    infant_age = 1e7  # 10 Myr   --> resampled particles below this age are converted to HII regions
    #              resampled particles above this age are converted to young stars
    #              <==> lifetime of an HII region

    # set up GALAXEV array
    bcstars = np.column_stack([[], [], [], [], [], [], []])

    # set up MAPPINGS-III array
    mapstars = np.column_stack([[], [], [], [], [], [], [], [], []])

    # set up dust array
    dust = np.column_stack([[], [], [], [], [], [], []])

    # index for particles to resample
    issf = gdat['sfr'] > 0.
    isyoung = sdat['t'] < young_age

    # append older stars to GALAXEV array
    if (~isyoung).any():
        bcstars = np.concatenate(
            (bcstars,
             np.column_stack([
                 sdat['r'], sdat['h'], sdat['im'], sdat['Z'], sdat['t']
             ])[~isyoung]),
            axis=0)
        info["exported_particles_old_stars"] = np.count_nonzero(~isyoung)
        info["exported_initial_mass_old_stars"] = sdat['im'][~isyoung].sum()
        info["exported_mass_old_stars"] = sdat['m'][~isyoung].sum()

    # append non-SF gas data to dust array
    if (~issf).any():
        dust = np.concatenate(
            (dust,
             np.column_stack([
                 gdat['r'], gdat['h'], gdat['m'], gdat['Z'], gdat['T']
             ])[~issf].copy()),
            axis=0)
        info["exported_particles_non_star_forming_gas"] = np.count_nonzero(
            ~issf)
        info["exported_mass_non_star_forming_gas"] = gdat['m'][~issf].sum()

    # resample stars
    if isyoung.any():
        for k in sdat.keys():
            sdat[k] = sdat[k][isyoung].copy()

        # calculate SFR at birth of young star particles in M_sun / yr
        sdat['sfr'] = getSFR(sdat['rho_born'], sdat['im'], schmidtparams)

        ms, ts, idxs, mdiffs = stochResamp(sdat['sfr'], sdat['im'])
        isinfant = ts < infant_age

        if (~isinfant).any():
            yngstars['r'] = sdat['r'][idxs][~isinfant]
            yngstars['h'] = sdat['h'][idxs][~isinfant]
            yngstars['im'] = ms[~isinfant]
            yngstars['Z'] = sdat['Z'][idxs][~isinfant]
            yngstars['t'] = ts[~isinfant]
            bcstars = np.concatenate(
                (bcstars,
                 np.column_stack([
                     yngstars['r'], yngstars['h'], yngstars['im'],
                     yngstars['Z'], yngstars['t']
                 ])),
                axis=0)
            info[
                "exported_particles_young_stars_from_stars"] = np.count_nonzero(
                    ~isinfant)
            info["exported_initial_mass_young_stars_from_stars"] = ms[
                ~isinfant].sum()
            info["exported_mass_young_stars_from_stars"] = info[
                "exported_initial_mass_young_stars_from_stars"]

        if (isinfant).any():
            hiiregions['r'] = sdat['r'][idxs][isinfant]
            hiiregions['h'] = sdat['h'][idxs][isinfant]
            hiiregions['SFR'] = ms[
                isinfant] / infant_age  # Assume constant SFR over HII region lifetime
            hiiregions['Z'] = sdat['Z'][idxs][isinfant]
            hiiregions['P'] = sdat['P'][idxs][
                isinfant] * 0.1  # Convert to Pa for output
            hiiregions['logC'] = 0.6 * np.log10(ms[isinfant]) + 0.4 * np.log10(
                hiiregions['P']) - 0.4 * np.log10(params['BOLTZMANN']) + 0.4
            hiiregions['fPDR'] = np.zeros_like(
                ts[isinfant]
            ) + f_PDR  # Covering fraction is set to constant value

            # calculate the HII region smoothing length from the mass of the surrounding PDR region,
            # estimated to be 10 times as massive (see Jonsson et al. 2010, MNRAS 403, 17-44),
            # using SKIRT's standard smoothing kernel mass/size normalization: rho = 8/pi * M/h^3;
            # and randomly shift the positions of the HII regions within a similarly enlarged range
            hiiregions['h_mapp'] = (
                10 * ms[isinfant] /
                (np.pi / 8 * sdat['rho_born'][idxs][isinfant] * densconv))**(
                    1 / 3.)
            stochShiftPos(hiiregions['r'], hiiregions['h'],
                          hiiregions['h_mapp'])

            # append to MAPPINGSIII array
            mapstars = np.concatenate(
                (mapstars,
                 np.column_stack([
                     hiiregions['r'], hiiregions['h_mapp'], hiiregions['SFR'],
                     hiiregions['Z'], hiiregions['logC'], hiiregions['P'],
                     hiiregions['fPDR']
                 ])),
                axis=0)
            info[
                "exported_particles_hii_regions_from_stars"] = np.count_nonzero(
                    isinfant)
            info["exported_initial_mass_hii_regions_from_stars"] = ms[
                isinfant].sum()
            info["exported_mass_hii_regions_from_stars"] = info[
                "exported_initial_mass_hii_regions_from_stars"]

            # append to dust array with negative mass to compensate for the mass of the surrounding PDR region,
            # considered to be 10 times as massive; use zero temperature as T is unavailable for resampled star particles
            dust = np.concatenate(
                (dust,
                 np.column_stack([
                     hiiregions['r'], hiiregions['h_mapp'] * 3.,
                     -10 * ms[isinfant], hiiregions['Z'],
                     np.zeros(hiiregions['Z'].shape[0])
                 ]).copy()),
                axis=0)
            info[
                "exported_particles_negative_gas_from_stars"] = np.count_nonzero(
                    isinfant)
            info["exported_mass_negative_gas_from_stars"] = 10 * ms[
                isinfant].sum()

        # add unspent young star particle material to dust array
        # use zero temperature as T is unavailable for resampled star particles
        mass = sdat['im'] - mdiffs
        dust = np.concatenate((dust,
                               np.column_stack([
                                   sdat['r'], sdat['h'], mass, sdat['Z'],
                                   np.zeros(sdat['Z'].shape[0])
                               ]).copy()),
                              axis=0)
        info["exported_particles_unspent_gas_from_stars"] = len(mass)
        info["exported_mass_unspent_gas_from_stars"] = mass.sum()

    # resample gas
    if issf.any():
        for k in gdat.keys():
            gdat[k] = gdat[k][issf].copy()

        ms, ts, idxs, mdiffs = stochResamp(gdat['sfr'], gdat['m'])
        isinfant = ts < infant_age

        if (~isinfant).any():
            yngstars['r'] = gdat['r'][idxs][~isinfant]
            yngstars['h'] = gdat['h'][idxs][~isinfant]
            yngstars['im'] = ms[~isinfant]
            yngstars['Z'] = gdat['Z'][idxs][~isinfant]
            yngstars['t'] = ts[~isinfant]
            bcstars = np.concatenate(
                (bcstars,
                 np.column_stack([
                     yngstars['r'], yngstars['h'], yngstars['im'],
                     yngstars['Z'], yngstars['t']
                 ])),
                axis=0)
            info["exported_particles_young_stars_from_gas"] = np.count_nonzero(
                ~isinfant)
            info["exported_initial_mass_young_stars_from_gas"] = ms[
                ~isinfant].sum()
            info["exported_mass_young_stars_from_gas"] = info[
                "exported_initial_mass_young_stars_from_gas"]

        if (isinfant).any():
            hiiregions['r'] = gdat['r'][idxs][isinfant]
            hiiregions['h'] = gdat['h'][idxs][isinfant]
            hiiregions['SFR'] = ms[
                isinfant] / infant_age  # Assume constant SFR over HII region lifetime
            hiiregions['Z'] = gdat['Z'][idxs][isinfant]
            hiiregions['P'] = gdat['P'][idxs][isinfant] * 0.1  # convert to Pa
            hiiregions['logC'] = 0.6 * np.log10(ms[isinfant]) + 0.4 * np.log10(
                hiiregions['P']) - 0.4 * np.log10(params['BOLTZMANN']) + 0.4
            hiiregions['fPDR'] = np.zeros_like(
                ts[isinfant]
            ) + f_PDR  # Covering fraction is set to constant value

            # calculate the HII region smoothing length from the mass of the surrounding PDR region,
            # estimated to be 10 times as massive (see Jonsson et al. 2010, MNRAS 403, 17-44),
            # using SKIRT's standard smoothing kernel mass/size normalization: rho = 8/pi * M/h^3;
            # and randomly shift the positions of the HII regions within a similarly enlarged range
            hiiregions['h_mapp'] = (
                10 * ms[isinfant] /
                (np.pi / 8 * gdat['rho'][idxs][isinfant] * densconv))**(1 / 3.)
            stochShiftPos(hiiregions['r'], hiiregions['h'],
                          hiiregions['h_mapp'])

            # append to MAPPINGSIII array
            mapstars = np.concatenate(
                (mapstars,
                 np.column_stack([
                     hiiregions['r'], hiiregions['h_mapp'], hiiregions['SFR'],
                     hiiregions['Z'], hiiregions['logC'], hiiregions['P'],
                     hiiregions['fPDR']
                 ])),
                axis=0)
            info["exported_particles_hii_regions_from_gas"] = np.count_nonzero(
                isinfant)
            info["exported_initial_mass_hii_regions_from_gas"] = ms[
                isinfant].sum()
            info["exported_mass_hii_regions_from_gas"] = info[
                "exported_initial_mass_hii_regions_from_gas"]

            # append to dust array with negative mass to compensate for the mass of the surrounding PDR region,
            # considered to be 10 times as massive; use negative temperature to indicate that it is not a physical value
            dust = np.concatenate(
                (dust,
                 np.column_stack([
                     hiiregions['r'], hiiregions['h_mapp'] * 3, -10 *
                     ms[isinfant], hiiregions['Z'], -gdat['T'][idxs][isinfant]
                 ]).copy()),
                axis=0)
            info[
                "exported_particles_negative_gas_from_gas"] = np.count_nonzero(
                    isinfant)
            info["exported_mass_negative_gas_from_gas"] = 10 * ms[
                isinfant].sum()

        # add unspent SF gas material to dust array; use negative temperature to indicate that it is not a physical value
        mass = gdat['m'] - mdiffs
        dust = np.concatenate(
            (dust,
             np.column_stack(
                 [gdat['r'], gdat['h'], mass, gdat['Z'], -gdat['T']]).copy()),
            axis=0)
        info["exported_particles_unspent_gas_from_gas"] = len(mass)
        info["exported_mass_unspent_gas_from_gas"] = mass.sum()

    # ---- make some sums and write the statistics and output files

    info["exported_particles_young_stars"] = info[
        "exported_particles_young_stars_from_stars"] + info[
            "exported_particles_young_stars_from_gas"]
    info["exported_initial_mass_young_stars"] = info[
        "exported_initial_mass_young_stars_from_stars"] + info[
            "exported_initial_mass_young_stars_from_gas"]
    info["exported_mass_young_stars"] = info[
        "exported_mass_young_stars_from_stars"] + info[
            "exported_mass_young_stars_from_gas"]

    info["exported_particles_stars"] = info[
        "exported_particles_old_stars"] + info["exported_particles_young_stars"]
    info["exported_initial_mass_stars"] = info[
        "exported_initial_mass_old_stars"] + info[
            "exported_initial_mass_young_stars"]
    info["exported_mass_stars"] = info["exported_mass_old_stars"] + info[
        "exported_mass_young_stars"]

    info["exported_particles_hii_regions"] = info[
        "exported_particles_hii_regions_from_stars"] + info[
            "exported_particles_hii_regions_from_gas"]
    info["exported_initial_mass_hii_regions"] = info[
        "exported_initial_mass_hii_regions_from_stars"] + info[
            "exported_initial_mass_hii_regions_from_gas"]
    info["exported_mass_hii_regions"] = info[
        "exported_mass_hii_regions_from_stars"] + info[
            "exported_mass_hii_regions_from_gas"]

    info["exported_particles_unspent_gas"] = info[
        "exported_particles_unspent_gas_from_stars"] + info[
            "exported_particles_unspent_gas_from_gas"]
    info["exported_mass_unspent_gas"] = info[
        "exported_mass_unspent_gas_from_stars"] + info[
            "exported_mass_unspent_gas_from_gas"]

    info["exported_particles_negative_gas"] = info[
        "exported_particles_negative_gas_from_stars"] + info[
            "exported_particles_negative_gas_from_gas"]
    info["exported_mass_negative_gas"] = info[
        "exported_mass_negative_gas_from_stars"] + info[
            "exported_mass_negative_gas_from_gas"]

    info["exported_particles_gas"] = info[
        "exported_particles_non_star_forming_gas"] + info[
            "exported_particles_unspent_gas"] + info[
                "exported_particles_negative_gas"]
    info["exported_mass_gas"] = info[
        "exported_mass_non_star_forming_gas"] + info[
            "exported_mass_unspent_gas"]  # - info["exported_mass_negative_gas"]
    info["exported_mass_baryons"] = info["exported_mass_stars"] + info[
        "exported_mass_hii_regions"] + info["exported_mass_gas"]

    # create the appropriate SKIRT-run directories
    skirtrun = SkirtRun(record["runid"], create=True)
    filepathprefix = os.path.join(
        skirtrun.inpath(), "{}_{}_".format(record["eaglesim"],
                                           record["galaxyid"]))

    # write the statistics file
    infofile = open(filepathprefix + "info.txt", 'w')
    infofile.write(
        '# Statistics for SPH particles extracted from EAGLE HDF5 snapshot to SKIRT6 format\n'
    )
    infofile.write('# Masses are expressed in solar mass units\n')
    maxkeylen = max(map(len, info.keys()))
    for key in sorted(info.keys()):
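        # counts and IDs are written as integers, masses in full-precision
        # scientific notation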
        valueformat = "d" if "_particles_" in key or "_id" in key else ".9e"
        infofile.write(("{0:" + str(maxkeylen) + "} = {1:15" + valueformat +
                        "}\n").format(key, info[key]))
    infofile.close()

    # ---- write output files

    # open output files
    starsfile = open(filepathprefix + "stars.dat", 'w')
    starsfile.write('# SPH Star Particles\n')
    starsfile.write('# Extracted from EAGLE HDF5 snapshot to SKIRT6 format\n')
    starsfile.write(
        '# Columns contain: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1) t(yr)\n')
    gasfile = open(filepathprefix + "gas.dat", 'w')
    gasfile.write('# SPH Gas Particles\n')
    gasfile.write('# Extracted from EAGLE HDF5 snapshot to SKIRT6 format\n')
    gasfile.write(
        '# Columns contain: x(pc) y(pc) z(pc) h(pc) M(Msun) Z(0-1) T(K)\n')
    hiifile = open(filepathprefix + "hii.dat", 'w')
    hiifile.write('# SPH HII Region Particles\n')
    hiifile.write('# Extracted from EAGLE HDF5 snapshot to SKIRT6 format\n')
    hiifile.write(
        '# Columns contain: x(pc) y(pc) z(pc) h(pc) SFR(Msun/yr) Z(0-1) logC P(Pa) f_PDR\n'
    )

    # save particle data
    np.savetxt(starsfile, bcstars, fmt=['%f'] * 7)
    np.savetxt(gasfile, dust, fmt=['%f'] * 7)
    np.savetxt(hiifile, mapstars, fmt=['%f'] * 7 + ['%e', '%f'])

    # close output files
    starsfile.close()
    gasfile.close()
    hiifile.close()
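
The three text files written above are plain whitespace-separated tables, so
they can be read back with np.loadtxt; a minimal sketch, assuming the same
filepathprefix as constructed above:

import numpy as np

stars = np.loadtxt(filepathprefix + "stars.dat")  # x y z h(pc) M(Msun) Z t(yr)
gas = np.loadtxt(filepathprefix + "gas.dat")      # x y z h(pc) M(Msun) Z T(K)
hii = np.loadtxt(filepathprefix + "hii.dat")      # x y z h SFR Z logC P f_PDR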
Example #15
    def select(self, centre, region_size):  # Region size in Mpc
        if not self.quiet:
            print('Loading region...')

        code_centre = centre * self.h / (
            self.a_0 * 1e3)  # physical kpc -> comoving Mpc/h code units
        code_region_size = region_size * self.h / self.a_0

        centre_mpc = centre / 1e3

        # Point read_eagle to the data
        snapfile = self.sim_path + 'snapshot_' + self.tag + '/snap_' + self.tag + '.0.hdf5'

        # Open snapshot
        snap = read.EagleSnapshot(snapfile)
        # Select region of interest
        snap.select_region(code_centre[0] - code_region_size / 2.,
                           code_centre[0] + code_region_size / 2.,
                           code_centre[1] - code_region_size / 2.,
                           code_centre[1] + code_region_size / 2.,
                           code_centre[2] - code_region_size / 2.,
                           code_centre[2] + code_region_size / 2.)

        if self.property == 'stars':
            pos = snap.read_dataset(4, 'Coordinates') * self.a_0 / self.h
            smoothing_length = snap.read_dataset(
                4, 'SmoothingLength') * self.a_0 / self.h
            quantity = snap.read_dataset(4, 'Mass') / self.h * 1e10

        else:
            pos = snap.read_dataset(0, 'Coordinates') * self.a_0 / self.h
            smoothing_length = snap.read_dataset(
                0, 'SmoothingLength') * self.a_0 / self.h

            if self.property == 'gas':
                quantity = snap.read_dataset(0, 'Mass') / self.h / 1e10

            elif self.property == 'ion':  # do this in CGS units
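                # the steps below assemble n_H and T per particle, look up the
                # ionisation balance for self.ion, and convert the element
                # mass into the mass in that ion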

                self.masses_u_dict = {
                    'Hydrogen': 1.00794,
                    'Carbon': 12.0107,
                    'Oxygen': 15.9994
                }

                p_mass = snap.read_dataset(
                    0, 'Mass') / self.h * C.unit_mass_cgs  # in g
                p_density = snap.read_dataset(
                    0, "Density"
                ) * self.h**2 / self.a_0**3 * C.unit_density_cgs  # in g cm^-3
                p_temp = snap.read_dataset(0, "Temperature")
                el_mass_abund = snap.read_dataset(
                    0, 'SmoothedElementAbundance/' + self.element)
                H_abund = snap.read_dataset(
                    0, 'SmoothedElementAbundance/Hydrogen')
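                # hydrogen number density n_H = rho * X_H / m_H (cm^-3)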
                p_nH = p_density * H_abund / C.m_H_cgs

                ion_fraction = ionbal.find_ionbal(self.z, self.ion,
                                                  np.log10(p_nH),
                                                  np.log10(p_temp))
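                # fraction of the element in the chosen ionisation state,
                # looked up as a function of redshift, log10(n_H) and log10(T)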

                mass_in_ion = p_mass * el_mass_abund * ion_fraction
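                # the division by 1e40 below is presumably only a downscaling
                # to keep the quantity within float range for sph-viewer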

                quantity = mass_in_ion / 1e40

            elif self.property == 'xrays':
                pids = snap.read_dataset(0, 'ParticleIDs')
                quantity = self.xrays[np.searchsorted(self.xray_pids, pids)]
            else:
                raise ValueError(
                    'Plot options are "gas", "ion", "stars" or "xrays"')

        # read_eagle returns complete hash cells, i.e. somewhat more than the
        # exact region requested, so we must mask to the region size again
        posmask = np.where(
            (np.absolute(pos[:, 0] - centre_mpc[0]) < region_size / 2.)
            & (np.absolute(pos[:, 1] - centre_mpc[1]) < region_size / 2.)
            & (np.absolute(pos[:, 2] - centre_mpc[2]) < region_size / 2.))[0]

        pos = pos[posmask, :]
        smoothing_length = smoothing_length[posmask]
        quantity = quantity[posmask]

        if self.property == 'ion':
            mass_in_ion = mass_in_ion[posmask]

        if not self.quiet:
            print('Wrapping box...')
        pos = ne.evaluate("pos-centre_mpc")
        pos[pos[:, 0] < (-1. * self.boxsize / 2.), 0] += self.boxsize
        pos[pos[:, 1] < (-1. * self.boxsize / 2.), 1] += self.boxsize
        pos[pos[:, 2] < (-1. * self.boxsize / 2.), 2] += self.boxsize
        pos[pos[:, 0] > self.boxsize / 2., 0] -= self.boxsize
        pos[pos[:, 1] > self.boxsize / 2., 1] -= self.boxsize
        pos[pos[:, 2] > self.boxsize / 2., 2] -= self.boxsize
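        # (the six per-axis shifts above apply the minimum-image wrap; on the
        #  centred positions an effectively equivalent one-liner would be
        #  pos = np.mod(pos + self.boxsize / 2., self.boxsize) - self.boxsize / 2.)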
        pos = ne.evaluate("pos+centre_mpc")

        N = len(quantity)

        pos *= 1e3  # convert to kpc
        smoothing_length *= 1e3
        if not self.quiet:
            print('Creating scene...')
        Particles = sphviewer.Particles(pos, quantity, hsml=smoothing_length)
        self.Scene = sphviewer.Scene(Particles)

        if self.property == 'ion':
            self.ion_positions = pos / 1e3
            self.ion_hsml = smoothing_length / 1e3
            self.ion_quantity = mass_in_ion / (
                C.m_sol_cgs * self.masses_u_dict[self.element])
            self.ion_centre = centre / 1e3
Example #16
    def read_chunk(self, groupnumber, centre, bulkvel, regionsize):
        '''
        This function uses the read_eagle module to very quickly load in a chunk of a snapshot
        around a particular FOF group with side length equal to the region size you want to save.
        It centres the co-ordinates on the centre of potential of the group (which you have input),
        and wraps the periodic box. It then masks this chunk to a spherical region with a diameter
        equal to the region size. Using this mask, the module loads in many other properties for the
        particles in the region and generates a dictionary to store them. This is done for each
        particle type, then all the dictionaries are wrapped up in another one and output at the end.
        '''

        #print 'Reading gas particle data'
        ptype = 0
        gas_dict = {}

        code_centre = centre * self.h / self.a_0  # physical Mpc -> comoving Mpc/h
        code_regionsize = regionsize * self.h / self.a_0
        code_boxsize = self.physical_boxsize * self.h / self.a_0

        r200 = self.r200s[groupnumber - 1]

        # Open snapshot
        snap = read.EagleSnapshot(self.snapfile)

        # Select region of interest
        snap.select_region(
            code_centre[0] - code_regionsize / 2.,
            code_centre[0] + code_regionsize / 2.,
            code_centre[1] - code_regionsize / 2.,
            code_centre[1] + code_regionsize / 2.,
            code_centre[2] - code_regionsize / 2.,
            code_centre[2] + code_regionsize / 2.,
        )

        pos = snap.read_dataset(0, 'Coordinates')  # in code units

        pos -= code_centre  # centre on galaxy

        # Wrap box
        pos[pos[:, 0] < (-1. * code_boxsize / 2.), 0] += code_boxsize
        pos[pos[:, 1] < (-1. * code_boxsize / 2.), 1] += code_boxsize
        pos[pos[:, 2] < (-1. * code_boxsize / 2.), 2] += code_boxsize
        pos[pos[:, 0] > code_boxsize / 2., 0] -= code_boxsize
        pos[pos[:, 1] > code_boxsize / 2., 1] -= code_boxsize
        pos[pos[:, 2] > code_boxsize / 2., 2] -= code_boxsize

        # Convert to physical units
        pos *= self.a_0 / self.h

        # Mask to region size
        rmax = regionsize / 2.
        r2 = np.einsum('...j,...j->...', pos,
                       pos)  # squared radii from the centre (avoids a sqrt)
        mask = np.where(r2 < rmax**2)[0]  # make the mask

        gas_dict['Coordinates'] = pos[mask]

        # Load in everything else. If you want any other quantities, simply add a similar line here
        # Don't forget to convert with factors of a and h as done here!
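        # (EAGLE code-unit factors, as applied below: masses carry 1/h,
        #  densities h**2 / a**3, velocities sqrt(a), lengths a/h)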
        gas_dict['Velocity'] = snap.read_dataset(
            ptype, "Velocity")[mask, :] * np.sqrt(
                self.a_0) - bulkvel  # subtract off halo velocity
        gas_dict['Mass'] = snap.read_dataset(ptype, "Mass")[mask] / self.h
        gas_dict['Density'] = snap.read_dataset(
            ptype, "Density")[mask] * self.h**2 / self.a_0**3
        gas_dict['Temperature'] = snap.read_dataset(ptype, "Temperature")[mask]
        gas_dict['ParticleIDs'] = snap.read_dataset(ptype, "ParticleIDs")[mask]
        gas_dict['GroupNumber'] = snap.read_dataset(ptype, "GroupNumber")[mask]
        gas_dict['StarFormationRate'] = snap.read_dataset(
            ptype, "StarFormationRate")[mask]
        gas_dict['Metallicity'] = snap.read_dataset(ptype, "Metallicity")[mask]
        gas_dict['SmoothedMetallicity'] = snap.read_dataset(
            ptype, "SmoothedMetallicity")[mask]
        gas_dict['SmoothingLength'] = snap.read_dataset(
            ptype, "SmoothingLength")[mask] * self.a_0 / self.h
        gas_dict['OnEquationOfState'] = snap.read_dataset(
            ptype, "OnEquationOfState")[mask]
        gas_dict['MaximumTemperature'] = snap.read_dataset(
            ptype, "MaximumTemperature")[mask]
        gas_dict['AExpMaximumTemperature'] = snap.read_dataset(
            ptype, "AExpMaximumTemperature")[mask]
        gas_dict['InternalEnergy'] = snap.read_dataset(ptype,
                                                       "InternalEnergy")[mask]

        # I wrap up all the smoothed abundances into one big array; you can
        # save them individually if you want by creating an entry in gas_dict
        # for each element instead
        H = snap.read_dataset(ptype, "SmoothedElementAbundance/Hydrogen")[mask]
        He = snap.read_dataset(ptype, "SmoothedElementAbundance/Helium")[mask]
        C = snap.read_dataset(ptype, "SmoothedElementAbundance/Carbon")[mask]
        N = snap.read_dataset(ptype, "SmoothedElementAbundance/Nitrogen")[mask]
        O = snap.read_dataset(ptype, "SmoothedElementAbundance/Oxygen")[mask]
        Ne = snap.read_dataset(ptype, "SmoothedElementAbundance/Neon")[mask]
        Mg = snap.read_dataset(ptype,
                               "SmoothedElementAbundance/Magnesium")[mask]
        Si = snap.read_dataset(ptype, "SmoothedElementAbundance/Silicon")[mask]
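        # Sulphur and Calcium are not tracked explicitly in EAGLE; they are
        # assumed to trace Silicon with the fixed ratios below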
        S = Si * 0.6054160
        Ca = Si * 0.0941736
        Fe = snap.read_dataset(ptype, "SmoothedElementAbundance/Iron")[mask]
        gas_dict['Abundances'] = np.dstack(
            (H, He, C, N, O, Ne, Mg, Si, S, Ca, Fe))[0]

        #print 'Reading dark matter particle data'
        ptype = 1
        DM_dict = {}

        pos = snap.read_dataset(1, 'Coordinates')  # in code units
        pos -= code_centre  # centre on galaxy

        # Wrap box
        pos[pos[:, 0] < (-1. * code_boxsize / 2.), 0] += code_boxsize
        pos[pos[:, 1] < (-1. * code_boxsize / 2.), 1] += code_boxsize
        pos[pos[:, 2] < (-1. * code_boxsize / 2.), 2] += code_boxsize
        pos[pos[:, 0] > code_boxsize / 2., 0] -= code_boxsize
        pos[pos[:, 1] > code_boxsize / 2., 1] -= code_boxsize
        pos[pos[:, 2] > code_boxsize / 2., 2] -= code_boxsize

        # Convert to physical units
        pos *= self.a_0 / self.h

        # Mask to rmax
        r2 = np.einsum('...j,...j->...', pos,
                       pos)  # squared radii from the centre
        mask = np.where(r2 < rmax**2)[0]  # make the mask

        DM_dict['Coordinates'] = pos[mask]

        # Load in everything else
        DM_dict['Velocity'] = snap.read_dataset(
            ptype, "Velocity")[mask, :] * np.sqrt(self.a_0) - bulkvel
        DM_dict['Mass'] = np.ones(len(mask)) * self.masstable[1]
        DM_dict['ParticleIDs'] = snap.read_dataset(ptype, "ParticleIDs")[mask]
        DM_dict['GroupNumber'] = snap.read_dataset(ptype, "GroupNumber")[mask]

        #print 'Reading star particle data'
        ptype = 4
        star_dict = {}

        pos = snap.read_dataset(4, 'Coordinates')  # in code units
        pos -= code_centre  # centre on galaxy

        # Wrap box
        pos[pos[:, 0] < (-1. * code_boxsize / 2.), 0] += code_boxsize
        pos[pos[:, 1] < (-1. * code_boxsize / 2.), 1] += code_boxsize
        pos[pos[:, 2] < (-1. * code_boxsize / 2.), 2] += code_boxsize
        pos[pos[:, 0] > code_boxsize / 2., 0] -= code_boxsize
        pos[pos[:, 1] > code_boxsize / 2., 1] -= code_boxsize
        pos[pos[:, 2] > code_boxsize / 2., 2] -= code_boxsize

        # Convert to physical units
        pos *= self.a_0 / self.h

        # Mask to rmax
        r2 = np.einsum('...j,...j->...', pos,
                       pos)  # squared radii from the centre
        mask = np.where(r2 < rmax**2)[0]  # make the mask

        star_dict['Coordinates'] = pos[mask]

        # Load in everything else
        star_dict['Velocity'] = snap.read_dataset(
            ptype, "Velocity")[mask, :] * np.sqrt(self.a_0) - bulkvel
        star_dict['Mass'] = snap.read_dataset(ptype, "Mass")[mask] / self.h
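        # note: 'Density' for stars is filled from the BirthDensity dataset,
        # i.e. the local gas density at the moment the star formed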
        star_dict['Density'] = snap.read_dataset(
            ptype, "BirthDensity")[mask] * self.h**2 / self.a_0**3
        star_dict['ParticleIDs'] = snap.read_dataset(ptype,
                                                     "ParticleIDs")[mask]
        star_dict['GroupNumber'] = snap.read_dataset(ptype,
                                                     "GroupNumber")[mask]
        star_dict['Metallicity'] = snap.read_dataset(ptype,
                                                     "Metallicity")[mask]
        star_dict['SmoothingLength'] = snap.read_dataset(
            ptype, "SmoothingLength")[mask] * self.a_0 / self.h

        H = snap.read_dataset(ptype, "SmoothedElementAbundance/Hydrogen")[mask]
        He = snap.read_dataset(ptype, "SmoothedElementAbundance/Helium")[mask]
        C = snap.read_dataset(ptype, "SmoothedElementAbundance/Carbon")[mask]
        N = snap.read_dataset(ptype, "SmoothedElementAbundance/Nitrogen")[mask]
        O = snap.read_dataset(ptype, "SmoothedElementAbundance/Oxygen")[mask]
        Ne = snap.read_dataset(ptype, "SmoothedElementAbundance/Neon")[mask]
        Mg = snap.read_dataset(ptype,
                               "SmoothedElementAbundance/Magnesium")[mask]
        Si = snap.read_dataset(ptype, "SmoothedElementAbundance/Silicon")[mask]
        S = Si * 0.6054160
        Ca = Si * 0.0941736
        Fe = snap.read_dataset(ptype, "SmoothedElementAbundance/Iron")[mask]
        star_dict['Abundances'] = np.dstack(
            (H, He, C, N, O, Ne, Mg, Si, S, Ca, Fe))[0]

        if self.incBH:
            #print 'Reading black hole particle data'
            ptype = 5
            BH_dict = {}

            pos = snap.read_dataset(5, 'Coordinates')  # in code units

            pos -= code_centre  # centre on galaxy

            # Wrap box
            pos[pos[:, 0] < (-1. * code_boxsize / 2.), 0] += code_boxsize
            pos[pos[:, 1] < (-1. * code_boxsize / 2.), 1] += code_boxsize
            pos[pos[:, 2] < (-1. * code_boxsize / 2.), 2] += code_boxsize
            pos[pos[:, 0] > code_boxsize / 2., 0] -= code_boxsize
            pos[pos[:, 1] > code_boxsize / 2., 1] -= code_boxsize
            pos[pos[:, 2] > code_boxsize / 2., 2] -= code_boxsize

            # Convert to physical units
            pos *= self.a_0 / self.h

            # Mask to rmax
            r2 = np.einsum('...j,...j->...', pos,
                           pos)  # squared radii from the centre
            mask = np.where(r2 < rmax**2)[0]  # make the mask

            BH_dict['Coordinates'] = pos[mask]

            # Load in everything else
            BH_dict['Velocity'] = snap.read_dataset(
                ptype, "Velocity")[mask, :] * np.sqrt(self.a_0) - bulkvel
            BH_dict['Mass'] = snap.read_dataset(ptype,
                                                "BH_Mass")[mask] / self.h
            BH_dict['Density'] = snap.read_dataset(
                ptype, "BH_Density")[mask] * self.h**2 / self.a_0**3
            BH_dict['ParticleIDs'] = snap.read_dataset(ptype,
                                                       "ParticleIDs")[mask]
            BH_dict['GroupNumber'] = snap.read_dataset(ptype,
                                                       "GroupNumber")[mask]
            BH_dict['SmoothingLength'] = snap.read_dataset(
                ptype, "SmoothingLength")[mask] * self.a_0 / self.h

        volume = {}
        volume['GroupNumber'] = groupnumber
        volume['CentreOfPotential'] = centre
        volume['BulkVelocity'] = bulkvel
        volume['r200'] = r200

        # Put everything together into one dictionary to return

        halo_dict = {}
        halo_dict['Gas'] = gas_dict
        halo_dict['DarkMatter'] = DM_dict
        halo_dict['Stars'] = star_dict
        if self.incBH:
            halo_dict['BlackHoles'] = BH_dict
        halo_dict['Volume'] = volume

        return halo_dict
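
A minimal sketch of consuming the dictionary returned above (the reader object
and its construction are assumed; the key names are as defined in read_chunk):

halo = reader.read_chunk(groupnumber, centre, bulkvel, regionsize)
gas_T = halo['Gas']['Temperature']          # K
dm_pos = halo['DarkMatter']['Coordinates']  # physical Mpc, centred on the group
r200 = halo['Volume']['r200']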