def __init__(self, args):
    """Load star data of the entire simulation.

    Reads birth times, initial masses, IDs, halo membership, and
    halo-centric radii of all stars in the snapshot matched to the VR
    catalogue. Sets ``self.is_set_up`` to False and returns early when
    the BH file records no VR snapshot.

    Parameters
    ----------
    args : argparse.Namespace
        Configuration; must provide wdir, bh_file, snap_name, vr_file.
    """
    # Find the snapshot for which a VR catalogue exists.
    vr_snap = hd.read_attribute(f'{args.wdir}{args.bh_file}',
                                'Haloes', 'VR_Snapshot')
    if vr_snap is None:
        self.is_set_up = False
        return
    snap_file = f'{args.wdir}{args.snap_name}_{vr_snap:04d}.hdf5'

    # Birth time and initial mass of all stars.
    stellar_aexp = hd.read_data(snap_file, 'PartType4/BirthScaleFactors')
    self.birth_times = aexp_to_time(stellar_aexp)
    # Masses are stored in units of 10^10 M_sun; convert to M_sun.
    self.mass = hd.read_data(snap_file, 'PartType4/InitialMasses') * 1e10
    self.ids = hd.read_data(snap_file, 'PartType4/ParticleIDs')
    coordinates = hd.read_data(snap_file, 'PartType4/Coordinates')
    # Convert comoving to physical coordinates.
    coordinates *= (hd.read_attribute(snap_file, 'Header', 'Scale-factor')[0])

    # Look up star IDs in VR. BUG FIX: connect_ids_to_vr returns
    # (haloes, aexp, zred); the original unpacked the last two values
    # into swapped names (vr_zred, vr_aexp). Both are unused locals, so
    # behavior is unchanged, but the names now match the return order.
    vr_file = f'{args.wdir}{args.vr_file}_{vr_snap:04d}'
    self.haloes, vr_aexp, vr_zred = xl.connect_ids_to_vr(
        self.ids, vr_file, require=True)

    # Radii of stars relative to their halo centre, in kpc.
    halo_centres = hd.read_data(f'{vr_file}.hdf5',
                                'MinimumPotential/Coordinates')
    self.radii = np.linalg.norm(
        coordinates - halo_centres[self.haloes, :], axis=1) * 1e3
    # Stars not assigned to any halo (index -1) get a sentinel radius.
    ind_not_in_halo = np.nonzero(self.haloes < 0)[0]
    self.radii[ind_not_in_halo] = -1
    self.is_set_up = True
def get_snapshot_redshifts(args):
    """Store the redshift of each snapshot in ``args.snaps_zred``.

    Entries for snapshot numbers not listed in ``args.snapshots`` keep
    the sentinel value -1.
    """
    n_slots = max(args.snapshots) + 1
    args.snaps_zred = np.full(n_slots, -1.0)
    for snap_index in args.snapshots:
        fname = f'{args.wdir}{args.snap_name}_{snap_index:04d}.hdf5'
        args.snaps_zred[snap_index] = hd.read_attribute(
            fname, 'Header', 'Redshift')[0]
def process_snap(isim, wdir, isnap, args):
    """Process one snapshot: optionally extract BH and VR data, then
    record the snapshot redshift in the output file."""
    snap_path = wdir + f'{args.snap_name}_{isnap:04d}.hdf5'
    vr_path = wdir + f'{args.vr_name}_{isnap:04d}.properties'

    if args.extract_bh and os.path.isfile(snap_path):
        extract_bh_data(snap_path, isim, isnap, args)
    if args.extract_vr and os.path.isfile(vr_path):
        extract_vr_data(vr_path, isim, isnap, args)

    # Record the redshift of this snapshot (if the snapshot exists).
    if os.path.isfile(snap_path):
        zred = hd.read_attribute(snap_path, 'Header', 'Redshift')[0]
        hd.write_attribute(args.outfile, 'Header', 'Redshift', zred)
def extract_bh_data(snapfile, isim, isnap, args):
    """Copy the configured PartType5 (BH) fields from one snapshot into
    the output file, under a per-simulation / per-snapshot group."""
    # Integer sim identifiers are prefixed with 'ID'; strings used as-is.
    prefix = (f'ID{isim}/S{isnap}/BH/' if isinstance(isim, int)
              else f'{isim}/S{isnap}/BH/')
    zred = hd.read_attribute(snapfile, 'Header', 'Redshift')[0]
    hd.write_attribute(args.outfile, prefix, 'Redshift', zred)
    for field in args.bh_fields:
        data = hd.read_data(snapfile, f'PartType5/{field}')
        # Fields absent from this snapshot are silently skipped.
        if data is not None:
            hd.write_data(args.outfile, f'{prefix}/{field}', data)
def add_coda_to_offsets(vr_part_file):
    """Append the total particle count as a final 'coda' entry to each
    offset list, so offsets[i+1] is valid for the last group too."""
    total_attr = {
        'Haloes': 'NumberOfBoundParticles_Total',
        'Unbound': 'NumberOfUnboundParticles_Total',
        'Groups': 'NumberOfSOParticles_Total',
    }
    for group, attr_name in total_attr.items():
        dataset = f'{group}/Offsets'
        offsets = read_data(vr_part_file, dataset)
        total = read_attribute(vr_part_file, 'Header', attr_name)
        write_data(vr_part_file, dataset, np.concatenate((offsets, [total])))
def get_vr_props(args):
    """Construct VR file names and redshift for matching.

    Stores ``vr_particles``/``vr_outfile`` paths and the catalogue's
    ``vr_zred``/``vr_aexp`` on args. No-op when no VR snapshot is set.
    """
    if args.vr_snap is None:
        return
    if args.combined_vr:
        base = args.wdir + f'{args.vr_file}_{args.vr_snap:04d}'
        args.vr_particles = base + '_particles.hdf5'
        args.vr_outfile = base + '.hdf5'
    else:
        # Only combined VR output is supported here.
        print("Please transcribe VR catalogue...")
        set_trace()
    aexp = float(
        hd.read_attribute(args.vr_outfile, 'SimulationInfo', 'ScaleFactor'))
    args.vr_zred = 1 / aexp - 1
    args.vr_aexp = aexp
def connect_ids_to_vr(ids, vr_file, require=False):
    """Core function to find the VR halo of a set of IDs.

    Parameters
    ----------
    ids : ndarray
        Particle IDs to locate.
    vr_file : str
        Base name of the VR catalogue (without the '.hdf5' suffix).
    require : bool, optional
        If True, drop into the debugger when the catalogue is missing.

    Returns
    -------
    tuple (vr_halo, aexp, zred), or None when the catalogue is missing.
        vr_halo holds the halo index per input ID (-1 if unmatched).
    """
    num_ids = len(ids)
    particle_file = f'{vr_file}_particles.hdf5'
    main_file = f'{vr_file}.hdf5'

    # Bail out if either part of the VR catalogue is absent.
    if not (os.path.isfile(particle_file) and os.path.isfile(main_file)):
        print(f"VR catalogue {vr_file} does not exist...")
        if require:
            set_trace()
        return None

    # Redshift of the VR catalogue, derived from its scale factor.
    aexp = float(hd.read_attribute(main_file, 'SimulationInfo',
                                   'ScaleFactor'))
    zred = 1 / aexp - 1
    print(f"Connecting to VR catalogue {vr_file} at redshift {zred}...")

    # Load VR particle IDs and the per-halo offsets into that list.
    vr_ids = hd.read_data(particle_file, 'Haloes/IDs')
    vr_offsets = hd.read_data(particle_file, 'Haloes/Offsets')

    print("Locating IDs in VR list...")
    t_start = time.time()
    ind_in_vr, found_in_vr = hx.find_id_indices(ids, vr_ids)
    print(f"... took {(time.time() - t_start):.3f} sec., located "
          f"{len(found_in_vr)} "
          f"/ {num_ids} IDs in VR list ({len(found_in_vr)/num_ids*100:.3f}%).")

    # Translate particle indices into halo indices via the offset list;
    # an index is only valid if it falls before the next halo's offset.
    found_indices = ind_in_vr[found_in_vr]
    halo_guess = np.searchsorted(vr_offsets, found_indices,
                                 side='right') - 1
    ind_good = np.nonzero(found_indices < vr_offsets[halo_guess + 1])[0]
    vr_halo = np.full(num_ids, -1, dtype=int)
    vr_halo[found_in_vr[ind_good]] = halo_guess[ind_good]
    print(f"... could match {len(ind_good)} / {num_ids} IDs to haloes. "
          f"({len(ind_good)/num_ids*100:.3f}%).")
    return vr_halo, aexp, zred
def setup_output(args):
    """Create one output array per BH data set found in the first snapshot.

    Returns
    -------
    (output_dict, comment_dict) : two dicts keyed by data-set name with
        the NaN-initialised output arrays and each set's 'Description'.
    """
    snapfile = args.wdir + args.snap_name + f'_{args.first_snap:04d}.hdf5'
    bh_datasets = hd.list_datasets(snapfile, 'PartType5')
    print(f"There are {len(bh_datasets)} BH data sets...")

    output_dict = {}
    comment_dict = {}
    for name in bh_datasets:
        # ParticleIDs are held separately; honour include/exclude lists.
        skip = (name == 'ParticleIDs'
                or (args.include is not None and name not in args.include)
                or (args.exclude is not None and name in args.exclude))
        if skip:
            continue

        # Read the data set once to learn its shape and dtype.
        data = hd.read_data(snapfile, f'PartType5/{name}')
        comment = hd.read_attribute(snapfile, f'PartType5/{name}',
                                    'Description')
        if data is None:
            print(f"Strange -- could not read BH data set {name}?!")
            set_trace()

        # Output shape: (num_bhs, <extra dims>, num_bh_snaps),
        # NaN-filled so unwritten slots are recognisable.
        shape = [args.num_bhs, *data.shape[1:], args.num_bh_snaps]
        output_dict[name] = np.zeros(tuple(shape), data.dtype) + np.nan
        comment_dict[name] = comment

    print("... finished creating output arrays.")
    args.times = np.zeros(args.num_bh_snaps)
    args.redshifts = np.zeros(args.num_bh_snaps)
    return output_dict, comment_dict
def get_birth_densities(args, isnap):
    """Return log10 birth densities of all stars in one snapshot, in
    units of the 'critical density' of Crain+15, plus initial masses."""
    snap_file = f'{args.wdir}{args.snap_name}_{isnap:04d}.hdf5'
    birth_densities = hd.read_data(snap_file, 'PartType4/BirthDensities')
    masses = hd.read_data(snap_file, 'PartType4/InitialMasses')

    # Conversion of code densities to hydrogen number density n_H [cm^-3].
    proton_mass_cgs = 1.673e-24   # Proton mass in g
    hydrogen_fraction = 0.752     # Hydrogen mass fraction (primordial)
    rho_to_cgs = hd.read_attribute(
        snap_file, 'PartType4/BirthDensities',
        'Conversion factor to physical CGS (including cosmological '
        'corrections)')
    rho_to_nH = hydrogen_fraction / proton_mass_cgs * rho_to_cgs

    # "Critical density" (see Crain+15), in n_H [cm^-3].
    n_crit = 10.0 * 1.81 ** (-1 / 2)
    return np.log10(birth_densities * rho_to_nH / n_crit), masses
def get_star_haloes(args, isnap):
    """Return halo index, halo-centric radius [kpc], and scale factor
    for all stars in one snapshot; also stores aexp/zred on args."""
    snap_file = f'{args.wdir}{args.snap_name}_{isnap:04d}.hdf5'
    star_ids = hd.read_data(snap_file, 'PartType4/ParticleIDs')
    # Physical star positions: comoving coordinates * scale factor.
    star_coordinates = hd.read_data(snap_file, 'PartType4/Coordinates')
    star_coordinates *= hd.read_attribute(
        snap_file, 'Header', 'Scale-factor')[0]

    vr_file = f'{args.wdir}{args.vr_name}_{isnap:04d}'
    star_haloes, aexp, zred = xl.connect_ids_to_vr(
        star_ids, vr_file, require=True)

    # Halo-centric radii in kpc; stars without a halo get the sentinel -1.
    halo_centres = hd.read_data(f'{vr_file}.hdf5',
                                'MinimumPotential/Coordinates')
    star_radii = np.linalg.norm(
        star_coordinates - halo_centres[star_haloes, :], axis=1) * 1e3
    star_radii[np.nonzero(star_haloes < 0)[0]] = -1

    args.aexp = aexp
    args.zred = zred
    return star_haloes, star_radii, aexp
def process_output(iisnap, isnap, output_dict, bpart_ids, args,
                   bpart_rev_ids=None):
    """Transcribe black hole data from one simulation output file.

    Parameters:
    -----------
    iisnap : int
        Index of currently processed output in collective array.
    isnap : int
        Simulation index of currently processed output.
    output_dict : dict of ndarrays
        Dictionary containing arrays to be filled with data.
    bpart_ids : ndarray
        The IDs of black holes to fill into output lists.
    args : dict of values
        Configuration parameters.
    bpart_rev_ids : optional
        If not None, a reverse-lookup object (must offer a ``query``
        method mapping particle IDs to output indices, -1 if absent),
        used instead of searching ``bpart_ids`` directly.
    """
    if iisnap % 50 == 0:
        print(f"Transcribing BH data for snapshot {isnap}...")
    stime = time.time()
    snapfile = args.wdir + args.snap_name + f'_{isnap:04d}.hdf5'

    # Get the names of all data sets to transcribe
    dataset_list = list(output_dict.keys())

    cstime = time.time()
    # Load IDs of particles in current output snapshot:
    bpart_ids_curr = hd.read_data(snapfile, 'PartType5/ParticleIDs')

    # Convert them to 'Black-IDs', i.e. their index in the output list
    if bpart_rev_ids is not None:
        # Fast path: direct reverse-ID lookup.
        rstime = time.time()
        bh_ids = bpart_rev_ids.query(bpart_ids_curr)
        #print(f"Querying {isnap} took {(time.time()-rstime):.3f} sec.")
        ind_matched = np.nonzero(bh_ids >= 0)[0]
        rstime = time.time()
        #print(f"Checking {isnap} took {(time.time()-rstime):.3f} sec.")
    else:
        # Slow path: search each current ID in the full output ID list.
        fstime = time.time()
        bh_ids, ind_matched = hx.find_id_indices(bpart_ids_curr, bpart_ids)
        print(f"FII {isnap} took {(time.time()-fstime):.3f} sec.")

    # Every BH in this output must already be in the output ID list.
    if len(ind_matched) != len(bpart_ids_curr):
        print(f"Why can't we match all BHs from output {isnap}?!?")
        set_trace()

    cetime = time.time()
    if iisnap % 50 == 0:
        print(f"... lookup took {cetime - cstime:.3f} sec.")

    # Load the time and redshift of current output
    redshift = hd.read_attribute(snapfile, 'Header', 'Redshift')[0]
    sim_time = hd.read_attribute(snapfile, 'Header', 'Time')[0]
    utime = hd.read_attribute(snapfile, 'Units',
                              'Unit time in cgs (U_t)')[0]
    utime /= (3600.0 * 24 * 365.24 * 1e9)   # Convert from sec to Gyr
    sim_time *= utime
    args.times[iisnap] = sim_time
    args.redshifts[iisnap] = redshift

    # Go through all to-transcribe data sets and copy them out
    for dset in dataset_list:
        # Make sure that the output data set has the expected shape
        if output_dict[dset].shape[0] != len(bpart_ids):
            print(f"Inconsistent shape of BH output array '{dset}'.")
            set_trace()

        # Load the data, make sure this actually worked
        data = hd.read_data(snapfile, 'PartType5/' + dset)
        if data is None:
            print(f"Oh my goodness, why can we now not find data set "
                  f"'{dset}' for black holes in output {isnap}?")
            set_trace()

        # Scatter this output's values into slice iisnap of the array.
        output_dict[dset][bh_ids, ..., iisnap] = data

    if iisnap % 50 == 0:
        print(f"... finished in {time.time() - stime:.3f} sec.")
def connect_to_galaxies(bpart_ids, args):
    """Connect black holes to galaxies at z = 0.

    Matches each BH ID to a VR halo (via the VR particle lists) and
    gathers a few key halo properties for convenience.

    Returns a dict with per-BH arrays: 'halo' (index, -1 if unmatched),
    'MStar', 'SFR', 'M200', 'HaloTypes' (zero for unmatched BHs).
    Returns None when no VR snapshot is configured.
    """
    if args.vr_snap is None:
        print("Skipping galaxy linking on your request...")
        return

    if args.combined_vr:
        args.vr_particles = args.wdir + \
            f'{args.vr_file}_{args.vr_snap:04d}_particles.hdf5'
        args.vr_outfile = args.wdir + \
            f'{args.vr_file}_{args.vr_snap:04d}.hdf5'
    else:
        # Only combined VR output is supported here.
        print("Please transcribe VR catalogue...")
        set_trace()

    # Redshift / scale factor of the VR catalogue.
    aexp = float(
        hd.read_attribute(args.vr_outfile, 'SimulationInfo', 'ScaleFactor'))
    args.vr_zred = 1 / aexp - 1
    args.vr_aexp = aexp
    print(f"Connecting to VR snapshot {args.vr_snap} at redshift "
          f"{args.vr_zred}...")

    # Load VR particle IDs
    vr_ids = hd.read_data(args.vr_particles, 'Haloes/IDs')
    vr_nums = hd.read_data(args.vr_particles, 'Haloes/Numbers')
    vr_offsets = hd.read_data(args.vr_particles, 'Haloes/Offsets')

    # Locate 'our' BHs in the VR ID list
    print("Locating BHs in VR list...")
    stime = time.time()
    ind_in_vr, found_in_vr = hx.find_id_indices(bpart_ids, vr_ids)
    print(f"... took {(time.time() - stime):.3f} sec., located "
          f"{len(found_in_vr)} "
          f"/ {len(bpart_ids)} BHs in VR list "
          f"({len(found_in_vr)/len(bpart_ids)*100:.3f}%).")

    # Now convert VR index to halo: a particle index belongs to halo i
    # iff it lies within [offset_i, offset_i + num_i).
    bh_halo = np.zeros(len(bpart_ids), dtype=int) - 1
    halo_guess = np.searchsorted(
        vr_offsets, ind_in_vr[found_in_vr], side='right') - 1
    ind_good = np.nonzero(ind_in_vr[found_in_vr] <
                          (vr_offsets[halo_guess] +
                           vr_nums[halo_guess]))[0]
    bh_halo[found_in_vr[ind_good]] = halo_guess[ind_good]
    print(f"... could match {len(ind_good)} / {len(bpart_ids)} BHs to haloes. "
          f"({len(ind_good)/len(bpart_ids)*100:.3f}%).")

    gal_props = {'halo': bh_halo}

    # Add a few key properties of the haloes, for convenience
    ind_in_halo = found_in_vr[ind_good]
    vr_mstar = hd.read_data(args.vr_outfile,
                            'ApertureMeasurements/30kpc/Stars/Masses')
    # NOTE(review): trailing slash in the SFR dataset path below --
    # presumably tolerated by the HDF5 reader; confirm.
    vr_sfr = hd.read_data(args.vr_outfile,
                          'ApertureMeasurements/30kpc/SFR/')
    vr_m200c = hd.read_data(args.vr_outfile, 'M200crit')
    vr_haloTypes = hd.read_data(args.vr_outfile, 'StructureTypes')

    gal_props['MStar'] = np.zeros(len(bpart_ids))
    gal_props['SFR'] = np.zeros(len(bpart_ids))
    gal_props['M200'] = np.zeros(len(bpart_ids))
    gal_props['HaloTypes'] = np.zeros(len(bpart_ids), dtype=int)

    gal_props['MStar'][ind_in_halo] = vr_mstar[bh_halo[ind_in_halo]]
    gal_props['SFR'][ind_in_halo] = vr_sfr[bh_halo[ind_in_halo]]
    gal_props['M200'][ind_in_halo] = vr_m200c[bh_halo[ind_in_halo]]
    gal_props['HaloTypes'][ind_in_halo] = vr_haloTypes[bh_halo[ind_in_halo]]
    return gal_props
def image_snap(isnap):
    """Main function to image one specified snapshot.

    Loads particle data around a camera position, renders an SPH image
    (surface density, temperature, diffusion parameters, SFR, or gri
    stellar light), optionally saves the maps to HDF5, and plots a PNG.

    NOTE(review): relies on module-level configuration defined elsewhere
    in this file (args, vr_halo, save_maps, show_bhs,
    fixedSmoothingLength, kernel_gamma, desNGB, camDir, projectionPlane,
    rho, edge_on, plot_bar) -- confirm their definitions before reuse.
    """
    print(f"Beginning imaging snapshot {isnap}...")
    stime = time.time()

    # Output stem; the snapshot number and '.png'/'.hdf5' are appended.
    plotloc = (args.rootdir +
               f'{args.outdir}/image_pt{args.ptype}_{args.imtype}_'
               f'{args.coda}_')
    if args.cambhbid is not None:
        plotloc = plotloc + f'BH-{args.cambhbid}_'
    if not os.path.isdir(os.path.dirname(plotloc)):
        os.makedirs(os.path.dirname(plotloc))
    # Skip images that already exist, unless re-plotting was requested.
    if not args.replot_existing and os.path.isfile(
            f'{plotloc}{isnap:04d}.png'):
        print(f"Image {plotloc}{isnap:04d}.png already exists, skipping.")
        return

    snapdir = args.rootdir + f'{args.snap_name}_{isnap:04d}.hdf5'
    mask = sw.mask(snapdir)

    # Read metadata
    print("Read metadata...")
    boxsize = max(mask.metadata.boxsize.value)
    ut = hd.read_attribute(snapdir, 'Units', 'Unit time in cgs (U_t)')[0]
    um = hd.read_attribute(snapdir, 'Units', 'Unit mass in cgs (U_M)')[0]
    time_int = hd.read_attribute(snapdir, 'Header', 'Time')[0]
    aexp_factor = hd.read_attribute(snapdir, 'Header', 'Scale-factor')[0]
    zred = hd.read_attribute(snapdir, 'Header', 'Redshift')[0]
    num_part = hd.read_attribute(snapdir, 'Header', 'NumPart_Total')
    time_gyr = time_int * ut / (3600 * 24 * 365.24 * 1e9)
    # Conversion of code accretion rates to M_sun / yr.
    mdot_factor = (um / 1.989e33) / (ut / (3600 * 24 * 365.24))

    # -----------------------
    # Snapshot-specific setup
    # -----------------------

    # Camera position: VR halo centre, moving box, explicit position,
    # a tracked BH, or (fallback) the box centre.
    camPos = None
    if vr_halo >= 0:
        print("Reading camera position from VR catalogue...")
        vr_file = args.rootdir + f'vr_{isnap:04d}.hdf5'
        camPos = hd.read_data(vr_file, 'MinimumPotential/Coordinates')
    elif args.varpos is not None:
        print("Find camera position...")
        if len(args.varpos) != 6:
            print("Need 6 arguments for moving box")
            set_trace()
        # Linear motion: start position + velocity * time.
        camPos = np.array([
            args.varpos[0] + args.varpos[3] * time_gyr,
            args.varpos[1] + args.varpos[4] * time_gyr,
            args.varpos[2] + args.varpos[5] * time_gyr
        ])
        print(camPos)
        camPos *= aexp_factor
    elif args.campos is not None:
        camPos = np.array(args.campos) * aexp_factor
    elif args.campos_phys is not None:
        # NOTE(review): this branch tests args.campos_phys but reads
        # args.campos -- looks like it should use args.campos_phys;
        # confirm before changing.
        camPos = np.array(args.campos)
    elif args.cambhid is not None:
        # Locate the requested BH by its particle ID.
        all_bh_ids = hd.read_data(snapdir, 'PartType5/ParticleIDs')
        args.cambh = np.nonzero(all_bh_ids == args.cambhid)[0]
        if len(args.cambh) == 0:
            print(f"BH ID {args.cambhid} does not exist, skipping.")
            return
        if len(args.cambh) != 1:
            print(f"Could not unambiguously find BH ID '{args.cambhid}'!")
            set_trace()
        args.cambh = args.cambh[0]

    if args.cambh is not None and camPos is None:
        # Centre the camera on the tracked BH; remember its smoothing
        # length for the optional hsml circle in the plot.
        camPos = hd.read_data(snapdir, 'PartType5/Coordinates',
                              read_index=args.cambh) * aexp_factor
        args.hsml = hd.read_data(
            snapdir, 'PartType5/SmoothingLengths',
            read_index=args.cambh) * aexp_factor * kernel_gamma
    elif camPos is None:
        print("Setting camera position to box centre...")
        camPos = np.array([0.5, 0.5, 0.5]) * boxsize * aexp_factor

    # Image size conversion, if necessary (comoving -> physical).
    if not args.propersize:
        args.imsize = args.realimsize * aexp_factor
        args.zsize = args.realzsize * aexp_factor
    else:
        args.imsize = args.realimsize
        args.zsize = args.realzsize

    # Maximum selection radius around the camera.
    max_sel = 1.2 * np.sqrt(3) * max(args.imsize, args.zsize)
    extent = np.array([-1, 1, -1, 1]) * args.imsize

    # Set up loading region: constrain the swiftsimio mask spatially
    # when the selection sphere is smaller than half the box.
    if max_sel < boxsize * aexp_factor / 2:
        load_region = np.array(
            [[camPos[0] - args.imsize * 1.2, camPos[0] + args.imsize * 1.2],
             [camPos[1] - args.imsize * 1.2, camPos[1] + args.imsize * 1.2],
             [camPos[2] - args.zsize * 1.2, camPos[2] + args.zsize * 1.2]])
        load_region = sw.cosmo_array(load_region / aexp_factor, "Mpc")
        mask.constrain_spatial(load_region)
        data = sw.load(snapdir, mask=mask)
    else:
        data = sw.load(snapdir)

    pt_names = ['gas', 'dark_matter', None, None, 'stars', 'black_holes']
    datapt = getattr(data, pt_names[args.ptype])
    pos = datapt.coordinates.value * aexp_factor

    # Next bit does periodic wrapping: shift particles across the box
    # boundary when the camera sits close to an edge.
    def flip_dim(idim):
        full_box_phys = boxsize * aexp_factor
        half_box_phys = boxsize * aexp_factor / 2
        if camPos[idim] < min(max_sel, half_box_phys):
            ind_high = np.nonzero(pos[:, idim] > half_box_phys)[0]
            pos[ind_high, idim] -= full_box_phys
        elif camPos[idim] > max(full_box_phys - max_sel, half_box_phys):
            ind_low = np.nonzero(pos[:, idim] < half_box_phys)[0]
            pos[ind_low, idim] += full_box_phys

    for idim in range(3):
        print(f"Periodic wrapping in dimension {idim}...")
        flip_dim(idim)

    # Keep only particles within the selection radius of the camera.
    rad = np.linalg.norm(pos - camPos[None, :], axis=1)
    ind_sel = np.nonzero(rad < max_sel)[0]
    pos = pos[ind_sel, :]

    # Read BH properties, if they exist
    if num_part[5] > 0 and not args.nobh:
        bh_hsml = (hd.read_data(snapdir, 'PartType5/SmoothingLengths') *
                   aexp_factor)
        bh_pos = hd.read_data(snapdir, 'PartType5/Coordinates') * aexp_factor
        bh_mass = hd.read_data(snapdir, 'PartType5/SubgridMasses') * 1e10
        bh_maccr = (hd.read_data(snapdir, 'PartType5/AccretionRates') *
                    mdot_factor)
        bh_id = hd.read_data(snapdir, 'PartType5/ParticleIDs')
        bh_nseed = hd.read_data(snapdir, 'PartType5/CumulativeNumberOfSeeds')
        bh_ft = hd.read_data(snapdir, 'PartType5/FormationScaleFactors')
        print(f"Max BH mass: {np.log10(np.max(bh_mass))}")
    else:
        bh_mass = None  # Dummy value

    # Read the appropriate 'mass' quantity (SFR for sfr images).
    if args.ptype == 0 and args.imtype == 'sfr':
        mass = datapt.star_formation_rates[ind_sel]
        mass.convert_to_units(unyt.Msun / unyt.yr)
        mass = np.clip(mass.value, 0, None)  # Don't care about last SFR aExp
    else:
        mass = datapt.masses[ind_sel]
        mass.convert_to_units(unyt.Msun)
        mass = mass.value

    # Smoothing lengths: native for gas, fixed if configured, otherwise
    # left to the imaging routine to compute.
    if args.ptype == 0:
        hsml = (datapt.smoothing_lengths.value[ind_sel] * aexp_factor *
                kernel_gamma)
    elif fixedSmoothingLength > 0:
        hsml = np.zeros(mass.shape[0], dtype=np.float32) + \
            fixedSmoothingLength
    else:
        hsml = None

    # Quantity the image is weighted by (defaults to the mass itself).
    if args.imtype == 'temp':
        quant = datapt.temperatures.value[ind_sel]
    elif args.imtype == 'diffusion_parameters':
        quant = datapt.diffusion_parameters.value[ind_sel]
    else:
        quant = mass

    # Read quantities for gri computation if necessary
    if args.ptype == 4 and args.imtype == 'gri':
        m_init = datapt.initial_masses.value[ind_sel] * 1e10  # in M_sun
        z_star = datapt.metal_mass_fractions.value[ind_sel]
        sft = datapt.birth_scale_factors.value[ind_sel]
        age_star = (time_gyr - hy.aexp_to_time(sft, time_type='age')) * 1e9
        age_star = np.clip(age_star, 0, None)  # Avoid rounding issues
        lum_g = et.imaging.stellar_luminosity(m_init, z_star, age_star, 'g')
        lum_r = et.imaging.stellar_luminosity(m_init, z_star, age_star, 'r')
        lum_i = et.imaging.stellar_luminosity(m_init, z_star, age_star, 'i')

    # ---------------------
    # Generate actual image
    # ---------------------

    xBase = np.zeros(3, dtype=np.float32)
    yBase = np.copy(xBase)
    zBase = np.copy(xBase)

    if args.imtype == 'gri':
        # One SPH image per band; the first call also returns the
        # smoothing lengths it computed, which the later calls reuse.
        image_weight_all_g, image_quant, hsml = ir.make_sph_image_new_3d(
            pos, lum_g, lum_g, hsml, DesNgb=desNGB, imsize=args.numpix,
            zpix=1, boxsize=args.imsize, CamPos=camPos, CamDir=camDir,
            ProjectionPlane=projectionPlane, verbose=True,
            CamAngle=[0, 0, rho], rollMode=0, edge_on=edge_on,
            treeAllocFac=10, xBase=xBase, yBase=yBase, zBase=zBase,
            return_hsml=True)
        image_weight_all_r, image_quant = ir.make_sph_image_new_3d(
            pos, lum_r, lum_r, hsml, DesNgb=desNGB, imsize=args.numpix,
            zpix=1, boxsize=args.imsize, CamPos=camPos, CamDir=camDir,
            ProjectionPlane=projectionPlane, verbose=True,
            CamAngle=[0, 0, rho], rollMode=0, edge_on=edge_on,
            treeAllocFac=10, xBase=xBase, yBase=yBase, zBase=zBase,
            return_hsml=False)
        image_weight_all_i, image_quant = ir.make_sph_image_new_3d(
            pos, lum_i, lum_i, hsml, DesNgb=desNGB, imsize=args.numpix,
            zpix=1, boxsize=args.imsize, CamPos=camPos, CamDir=camDir,
            ProjectionPlane=projectionPlane, verbose=True,
            CamAngle=[0, 0, rho], rollMode=0, edge_on=edge_on,
            treeAllocFac=10, xBase=xBase, yBase=yBase, zBase=zBase,
            return_hsml=False)

        # Convert band luminosity maps to surface brightness
        # (mag/arcsec^2, per the 'maas' naming).
        map_maas_g = -5 / 2 * np.log10(
            image_weight_all_g[:, :, 1] + 1e-15) + 5 * np.log10(
                180 * 3600 / np.pi) + 25
        map_maas_r = -5 / 2 * np.log10(
            image_weight_all_r[:, :, 1] + 1e-15) + 5 * np.log10(
                180 * 3600 / np.pi) + 25
        map_maas_i = -5 / 2 * np.log10(
            image_weight_all_i[:, :, 1] + 1e-15) + 5 * np.log10(
                180 * 3600 / np.pi) + 25

    else:
        image_weight_all, image_quant = ir.make_sph_image_new_3d(
            pos, mass, quant, hsml, DesNgb=desNGB, imsize=args.numpix,
            zpix=1, boxsize=args.imsize, CamPos=camPos, CamDir=camDir,
            ProjectionPlane=projectionPlane, verbose=True,
            CamAngle=[0, 0, rho], rollMode=0, edge_on=edge_on,
            treeAllocFac=10, xBase=xBase, yBase=yBase, zBase=zBase,
            zrange=[-args.zsize, args.zsize])

        # Extract surface density in M_sun [/yr] / kpc^2
        sigma = np.log10(image_weight_all[:, :, 1] + 1e-15) - 6
        if args.ptype == 0 and args.imtype in ['temp']:
            tmap = np.log10(image_quant[:, :, 1])
        elif args.ptype == 0 and args.imtype in ['diffusion_parameters']:
            tmap = image_quant[:, :, 1]

    # -----------------
    # Save image data
    # -----------------

    if save_maps:
        maploc = plotloc + f'{isnap:04d}.hdf5'
        if args.imtype == 'gri' and args.ptype == 4:
            hd.write_data(maploc, 'g_maas', map_maas_g, new=True)
            hd.write_data(maploc, 'r_maas', map_maas_r)
            hd.write_data(maploc, 'i_maas', map_maas_i)
        else:
            hd.write_data(maploc, 'Sigma', sigma, new=True)
            if args.ptype == 0 and args.imtype == 'temp':
                hd.write_data(maploc, 'Temperature', tmap)
            elif args.ptype == 0 and args.imtype == 'diffusion_parameters':
                hd.write_data(maploc, 'DiffusionParameters', tmap)
        hd.write_data(maploc, 'Extent', extent)
        hd.write_attribute(maploc, 'Header', 'CamPos', camPos)
        hd.write_attribute(maploc, 'Header', 'ImSize', args.imsize)
        hd.write_attribute(maploc, 'Header', 'NumPix', args.numpix)
        hd.write_attribute(maploc, 'Header', 'Redshift', 1 / aexp_factor - 1)
        hd.write_attribute(maploc, 'Header', 'AExp', aexp_factor)
        hd.write_attribute(maploc, 'Header', 'Time', time_gyr)

        if bh_mass is not None:
            hd.write_data(maploc, 'BH_pos', bh_pos - camPos[None, :],
                          comment='Relative position of BHs')
            hd.write_data(maploc, 'BH_mass', bh_mass,
                          comment='Subgrid mass of BHs')
            hd.write_data(
                maploc, 'BH_maccr', bh_maccr,
                comment='Instantaneous BH accretion rate in M_sun/yr')
            hd.write_data(maploc, 'BH_id', bh_id,
                          comment='Particle IDs of BHs')
            hd.write_data(maploc, 'BH_nseed', bh_nseed,
                          comment='Number of seeds in each BH')
            hd.write_data(maploc, 'BH_aexp', bh_ft,
                          comment='Formation scale factor of each BH')

    # -------------
    # Plot image...
    # -------------

    if not args.noplot:
        print("Obtained image, plotting...")
        fig = plt.figure(figsize=(args.inch, args.inch))
        ax = fig.add_axes([0.0, 0.0, 1.0, 1.0])
        plt.sca(ax)

        # Option I: we have really few particles. Plot them individually:
        if pos.shape[0] < 32:
            plt.scatter(pos[:, 0] - camPos[0], pos[:, 1] - camPos[1],
                        color='white')
        else:
            # Main plotting regime

            # Case A: gri image -- very different from rest
            if args.ptype == 4 and args.imtype == 'gri':
                vmin = -args.scale[0] + np.array([-0.5, -0.25, 0.0])
                vmax = -args.scale[1] + np.array([-0.5, -0.25, 0.0])

                clmap_rgb = np.zeros((args.numpix, args.numpix, 3))
                clmap_rgb[:, :, 2] = np.clip(
                    ((-map_maas_g) - vmin[0]) / ((vmax[0] - vmin[0])), 0, 1)
                clmap_rgb[:, :, 1] = np.clip(
                    ((-map_maas_r) - vmin[1]) / ((vmax[1] - vmin[1])), 0, 1)
                clmap_rgb[:, :, 0] = np.clip(
                    ((-map_maas_i) - vmin[2]) / ((vmax[2] - vmin[2])), 0, 1)

                im = plt.imshow(clmap_rgb, extent=extent, aspect='equal',
                                interpolation='nearest', origin='lower',
                                alpha=1.0)
            else:
                # Establish image scaling
                if not args.absscale:
                    ind_use = np.nonzero(sigma > 1e-15)
                    vrange = np.percentile(sigma[ind_use], args.scale)
                else:
                    vrange = args.scale
                print(f'Sigma range: {vrange[0]:.4f} -- {vrange[1]:.4f}')

                # Case B: temperature/diffusion parameter image
                if (args.ptype == 0
                        and args.imtype in ['temp', 'diffusion_parameters']
                        and not args.no_double_image):
                    if args.imtype == 'temp':
                        cmap = None
                    elif args.imtype == 'diffusion_parameters':
                        cmap = cmocean.cm.haline
                    clmap_rgb = ir.make_double_image(
                        sigma, tmap, percSigma=vrange, absSigma=True,
                        rangeQuant=args.quantrange, cmap=cmap)
                    im = plt.imshow(clmap_rgb, extent=extent,
                                    aspect='equal',
                                    interpolation='nearest',
                                    origin='lower', alpha=1.0)
                else:
                    # Standard sigma images
                    if args.ptype == 0:
                        if args.imtype == 'hi':
                            cmap = plt.cm.bone
                        elif args.imtype == 'sfr':
                            cmap = plt.cm.magma
                        elif args.imtype == 'diffusion_parameters':
                            cmap = cmocean.cm.haline
                        else:
                            cmap = plt.cm.inferno
                    elif args.ptype == 1:
                        cmap = plt.cm.Greys_r
                    elif args.ptype == 4:
                        cmap = plt.cm.bone

                    if args.no_double_image:
                        plotquant = tmap
                        vmin, vmax = args.quantrange[0], args.quantrange[1]
                    else:
                        plotquant = sigma
                        vmin, vmax = vrange[0], vrange[1]

                    im = plt.imshow(plotquant, cmap=cmap, extent=extent,
                                    vmin=vmin, vmax=vmax, origin='lower',
                                    interpolation='nearest',
                                    aspect='equal')

            # Plot BHs if desired:
            if show_bhs and bh_mass is not None:
                if args.bh_file is not None:
                    bh_inds = np.loadtxt(args.bh_file, dtype=int)
                else:
                    bh_inds = np.arange(bh_pos.shape[0])
                # Keep BHs inside the image cube and within the
                # configured formation-time and mass windows.
                ind_show = np.nonzero(
                    (np.abs(bh_pos[bh_inds, 0] - camPos[0]) < args.imsize) &
                    (np.abs(bh_pos[bh_inds, 1] - camPos[1]) < args.imsize) &
                    (np.abs(bh_pos[bh_inds, 2] - camPos[2]) < args.zsize) &
                    (bh_ft[bh_inds] >= args.bh_ftrange[0]) &
                    (bh_ft[bh_inds] <= args.bh_ftrange[1]) &
                    (bh_mass[bh_inds] >= 10.0**args.bh_mrange[0]) &
                    (bh_mass[bh_inds] <= 10.0**args.bh_mrange[1]))[0]
                ind_show = bh_inds[ind_show]

                if args.bh_quant == 'mass':
                    # Sort ascending so the most massive end up on top.
                    sorter = np.argsort(bh_mass[ind_show])
                    sc = plt.scatter(bh_pos[ind_show[sorter], 0] - camPos[0],
                                     bh_pos[ind_show[sorter], 1] - camPos[1],
                                     marker='o',
                                     c=np.log10(bh_mass[ind_show[sorter]]),
                                     edgecolor='grey', vmin=5.0,
                                     vmax=args.bh_mmax, s=5.0,
                                     linewidth=0.2)
                    bticks = np.linspace(5.0, args.bh_mmax, num=6,
                                         endpoint=True)
                    blabel = r'log$_{10}$ ($m_\mathrm{BH}$ [M$_\odot$])'
                elif args.bh_quant == 'formation':
                    sorter = np.argsort(bh_ft[ind_show])
                    sc = plt.scatter(bh_pos[ind_show[sorter], 0] - camPos[0],
                                     bh_pos[ind_show[sorter], 1] - camPos[1],
                                     marker='o',
                                     c=bh_ft[ind_show[sorter]],
                                     edgecolor='grey', vmin=0,
                                     vmax=1.0, s=5.0, linewidth=0.2)
                    bticks = np.linspace(0.0, 1.0, num=6, endpoint=True)
                    blabel = 'Formation scale factor'

                if args.bhind:
                    # Annotate each shown BH with its index.
                    for ibh in ind_show[sorter]:
                        c = plt.cm.viridis(
                            (np.log10(bh_mass[ibh]) - 5.0) /
                            (args.bh_mmax - 5.0))
                        plt.text(bh_pos[ibh, 0] - camPos[0] +
                                 args.imsize / 200,
                                 bh_pos[ibh, 1] - camPos[1] +
                                 args.imsize / 200,
                                 f'{ibh}', color=c, fontsize=4,
                                 va='bottom', ha='left')

                if args.draw_hsml:
                    # Draw the smoothing-length circle of the camera BH.
                    phi = np.arange(0, 2.01 * np.pi, 0.01)
                    plt.plot(args.hsml * np.cos(phi),
                             args.hsml * np.sin(phi),
                             color='white', linestyle=':', linewidth=0.5)

                # Add colour bar for BH masses
                if args.imtype != 'sfr':
                    ax2 = fig.add_axes([0.6, 0.07, 0.35, 0.02])
                    ax2.set_xticks([])
                    ax2.set_yticks([])
                    cbar = plt.colorbar(sc, cax=ax2,
                                        orientation='horizontal',
                                        ticks=bticks)
                    cbar.ax.tick_params(labelsize=8)
                    fig.text(0.775, 0.1, blabel, rotation=0.0,
                             va='bottom', ha='center', color='white',
                             fontsize=8)

        # Done with main image, some embellishments...
        plt.sca(ax)
        plt.text(-0.045 / 0.05 * args.imsize, 0.045 / 0.05 * args.imsize,
                 'z = {:.3f}'.format(1 / aexp_factor - 1),
                 va='center', ha='left', color='white')
        plt.text(-0.045 / 0.05 * args.imsize, 0.041 / 0.05 * args.imsize,
                 't = {:.3f} Gyr'.format(time_gyr),
                 va='center', ha='left', color='white', fontsize=8)
        plot_bar()

        # Plot colorbar for SFR if appropriate
        if args.ptype == 0 and args.imtype == 'sfr':
            ax2 = fig.add_axes([0.6, 0.07, 0.35, 0.02])
            ax2.set_xticks([])
            ax2.set_yticks([])
            # Dummy off-screen scatter only to feed the colorbar.
            scc = plt.scatter([-1e10], [-1e10], c=[0], cmap=plt.cm.magma,
                              vmin=vrange[0], vmax=vrange[1])
            cbar = plt.colorbar(scc, cax=ax2, orientation='horizontal',
                                ticks=np.linspace(np.floor(vrange[0]),
                                                  np.ceil(vrange[1]),
                                                  5, endpoint=True))
            cbar.ax.tick_params(labelsize=8)
            fig.text(
                0.775, 0.1,
                r'log$_{10}$ ($\Sigma_\mathrm{SFR}$ [M$_\odot$ yr$^{-1}$ kpc$^{-2}$])',
                rotation=0.0, va='bottom', ha='center', color='white',
                fontsize=8)

        ax.set_xlabel(r'$\Delta x$ [pMpc]')
        ax.set_ylabel(r'$\Delta y$ [pMpc]')
        ax.set_xlim((-args.imsize, args.imsize))
        ax.set_ylim((-args.imsize, args.imsize))

        plt.savefig(plotloc + str(isnap).zfill(4) + '.png',
                    dpi=args.numpix / args.inch)
        plt.close()

    print(f"Finished snapshot {isnap} in {(time.time() - stime):.2f} sec.")
    print(f"Image saved in {plotloc}{isnap:04d}.png")
set_trace() bh_ids = hd.read_data(datafile_ref, 'PartType5/ParticleIDs') args.index = np.nonzero(bh_ids == args.id)[0] if len(args.index) != 1: print(f"Could not unambiguously find BH ID {args.id}...") set_trace() args.index = args.index[0] bh_id = args.id bh_ft = hd.read_data(datafile_ref, 'PartType5/FormationScaleFactors', read_index=args.index) bh_pos = hd.read_data(datafile_ref, 'PartType5/Coordinates', read_index=args.index) aexp_ref = hd.read_attribute(datafile_ref, 'Header', 'Scale-factor')[0] print(f"BH ID {bh_id} formed at aexp={bh_ft:.4f}") print(f"BH position at snap {args.snapshot}: ", bh_pos) # Set the formation time range of possible BH matches search_aexp = np.array([bh_ft - 0.1, bh_ft + 0.1]) search_aexp = np.clip(search_aexp, 0, min(1, aexp_ref)) datafile_end = None for isnap in range(args.snapshot + 1): datafile_end = wdir1 + f'/{args.name1}_{isnap:04d}.hdf5' if not os.path.isfile(datafile_end): continue
def process_sim(isim, args):
    """Process one specific simulation.

    Resolves the simulation directory, builds the BH selection (from a
    pre-made plot-data file, an explicit BID list, or property cuts),
    loads the BH data, and runs process_bh for each selected BH.
    """
    if args.have_full_sim_dir:
        args.wdir = isim
    else:
        args.wdir = xl.get_sim_dir(args.base_dir, isim)
    print(f"Analysing simulation {args.wdir}...")

    # Name of the input catalogue, containing all the data to plot
    args.catloc = f'{args.wdir}{args.bh_file}'

    # Select BHs in this sim: prefer a pre-existing plot-data file,
    # then an explicit BID list, else build a property-based selection.
    args.plotdata_file = f'{args.wdir}{args.vrplot_prefix}.hdf5'
    if os.path.isfile(args.plotdata_file):
        bh_list = hd.read_data(args.plotdata_file, 'BlackHoleBIDs')
        select_list = None
    elif args.bh_bid is not None:
        select_list = None
        bh_list = args.bh_bid
    else:
        # Find BHs we are interested in, load data
        select_list = [["Halo_MStar", '>=', args.halo_mstar_range[0]],
                       ["Halo_MStar", '<', args.halo_mstar_range[1]],
                       ["Halo_M200c", '>=', args.halo_m200_range[0]],
                       ["Halo_M200c", '<', args.halo_m200_range[1]]]
        if not args.include_subdominant_bhs:
            select_list.append(['Flag_MostMassiveInHalo', '==', 1])
        if not args.include_satellites:
            select_list.append(['HaloTypes', '==', 10])

        if args.bh_mass_range is not None:
            # Apply the mass cut at the output closest to the
            # configured selection redshift.
            zreds = hd.read_data(args.wdir + args.bh_file, 'Redshifts')
            best_index = np.argmin(np.abs(zreds - args.bh_selection_redshift))
            print(f"Best index for redshift {args.bh_selection_redshift} is "
                  f"{best_index}.")

            # Subgrid masses are in 10^10 M_sun, so need to adjust selection
            # range
            select_list.append([
                'SubgridMasses', '>=', args.bh_mass_range[0] / 1e10,
                best_index
            ])
            select_list.append([
                'SubgridMasses', '<=', args.bh_mass_range[1] / 1e10,
                best_index
            ])
        bh_list = None

    bh_file = args.wdir + args.bh_file
    bh_data, bh_list = xl.lookup_bh_data(bh_file, bh_props_list,
                                         select_list, bh_list)
    args.nsnap = len(bh_data['Times'])

    # Extract meta-data from Header
    bh_data['CodeBranch'] = hd.read_attribute(bh_file, 'Code', 'Git Branch')
    bh_data['CodeDate'] = hd.read_attribute(bh_file, 'Code', 'Git Date')
    bh_data['CodeRev'] = hd.read_attribute(bh_file, 'Code', 'Git Revision')
    bh_data['SimName'] = hd.read_attribute(bh_file, 'Header', 'RunName')

    # Look up stars data
    stars = Stars(args)
    for ibh in bh_list:
        process_bh(args, stars, bh_data, ibh, isim)