def check_sage_file(SAGE_fname, simulation="Kali"):
    """
    Goes through the specified SAGE file and does a number of checks.

    Parameters
    ----------

    SAGE_fname: String. Required.
        Name of the SAGE file to check.

    simulation: String. Default: Kali.
        Name of the simulation SAGE used.

    Returns
    ----------

    None. If a check fails, the program will exit.
    """

    if simulation == "Kali":
        AllVars.Set_Params_Kali()
    elif simulation == "MiniMill":
        AllVars.Set_Params_MiniMill()
    else:
        print("A valid simulation has not been selected when calling "
              "`check_sage_file`.")
        raise ValueError("Unknown simulation '{0}'.".format(simulation))

    Gals, Gals_Desc = ReadScripts.ReadGals_SAGE(SAGE_fname, None,
                                                len(AllVars.SnapZ))
    NTrees = ReadScripts.Read_SAGE_header(SAGE_fname, None)

    check_centrals(Gals, NTrees)
def adjust_ini():
    """
    Takes base ``SAGE`` and ``cifog`` ``.ini`` files and prompts the user to
    update them.

    .. note::
        Only paths and filenames are updated.  This function does not touch
        recipe parameters, constants or simulation variables.

    Parameters
    ----------

    None.

    Returns
    ----------

    None. The updated ``.ini`` files are saved to the ``ini_files`` directory
    of the run directory specified by the user.
    """

    # First get the base ini files that we'll use.
    base_SAGE_ini = "{0}/../ini_files/kali_SAGE.ini".format(script_dir)
    base_cifog_ini = "{0}/../ini_files/kali_cifog.ini".format(script_dir)

    # Prompt for a SAGE ini path. If none provided, use the base.
    my_SAGE_ini = input("Template SAGE ini file [default: "
                        "{0}]: ".format(base_SAGE_ini))
    if not my_SAGE_ini:
        my_SAGE_ini = base_SAGE_ini

    # Do the same for cifog.
    my_cifog_ini = input("Template cifog ini file [default: "
                         "{0}]: ".format(base_cifog_ini))
    if not my_cifog_ini:
        my_cifog_ini = base_cifog_ini

    SAGE_params = rs.read_SAGE_ini(my_SAGE_ini)
    cifog_params, cifog_headers = rs.read_cifog_ini(my_cifog_ini)

    # This directory will house all the output from RSAGE.
    run_directory = None
    while not run_directory:
        run_directory = input("Base output directory: ")
        if not run_directory:
            print("The output directory must be specified.")

    # Update the fields based on user input.
    SAGE_fields_update = update_SAGE_dict(SAGE_params)
    cifog_fields_update = update_cifog_dict(cifog_params)

    # Create all the directories and ini files.
    cp.create_directories(run_directory)
    cp.update_ini_files(base_SAGE_ini, base_cifog_ini,
                        SAGE_fields_update, cifog_fields_update,
                        run_directory)

    print("ini files for both SAGE and cifog have been created and placed "
          "into {0}/ini_files".format(run_directory))
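# The prompts in ``adjust_ini`` fall back to the kali templates when the user
# just presses enter.  The same pattern, condensed into a small helper, is
# sketched below; this helper is illustrative only and is not used by the
# functions above.
def prompt_with_default(prompt, default):
    """Ask for a value, returning ``default`` when the reply is empty."""
    reply = input("{0} [default: {1}]: ".format(prompt, default))
    return reply if reply else default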
def read_grids(SAGE_params, cifog_params, snapshot, reshape=False):
    """
    Reads the grids for a specific ``RSAGE`` run.

    Parameters
    ----------

    SAGE_params : Dictionary
        Dictionary keyed by the ``SAGE`` parameter field names and containing
        the values from the ``.ini`` file.  See ``read_SAGE_ini`` in
        ``output/ReadScripts.py`` for full details.

    cifog_params : Dictionary
        Dictionary keyed by the ``cifog`` parameter field names and containing
        the values from the ``.ini`` file.  See ``read_cifog_ini`` in
        ``output/ReadScripts.py`` for full details.

    snapshot : Integer
        Snapshot number the grids correspond to.

    reshape : Boolean, default False
        Controls whether the grids should be recast to NxNxN arrays (True) or
        kept as 1D arrays (False).

    Returns
    ----------

    nion_grid, XHII_grid, photHI_grid : numpy arrays
        The ionizing photon (nionHI), ionization fraction (XHII) and
        photoionization rate (photHI) grids for the requested snapshot.
    """

    print("Reading the nionHI, XHII and photHI grids for the test run.")

    RunPrefix = SAGE_params["RunPrefix"]
    OutputDir = SAGE_params["OutputDir"]
    GridSize = int(SAGE_params["GridSize"])

    # Here the precision is 1 for float, 2 for double.  XHII and photHI are
    # hardcoded to be in double.
    nion_prefix = get_nion_prefix(SAGE_params)
    nion_path = "{0}/grids/nion/{1}_{2}_nionHI_{3:03d}".format(
        OutputDir, RunPrefix, nion_prefix, snapshot)
    nion_precision = int(cifog_params["nionFilesAreInDoublePrecision"]) + 1
    nion_grid = ReadScripts.read_binary_grid(nion_path, GridSize,
                                             nion_precision, reshape=reshape)

    XHII_path = "{0}/grids/cifog/{1}_XHII_{2:03d}".format(
        OutputDir, RunPrefix, snapshot)
    XHII_precision = 2
    XHII_grid = ReadScripts.read_binary_grid(XHII_path, GridSize,
                                             XHII_precision, reshape=reshape)

    photHI_path = "{0}/grids/cifog/{1}_photHI_{2:03d}".format(
        OutputDir, RunPrefix, snapshot)
    photHI_precision = 2
    photHI_grid = ReadScripts.read_binary_grid(photHI_path, GridSize,
                                               photHI_precision,
                                               reshape=reshape)

    return nion_grid, XHII_grid, photHI_grid
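# The ``precision`` flag follows the convention used throughout these scripts:
# 1 denotes 4-byte floats and 2 denotes 8-byte doubles.  The actual I/O is done
# by ``ReadScripts.read_binary_grid``; the helper below is only a minimal
# sketch of that convention (an assumption about its behaviour, not the real
# implementation).
def read_binary_grid_sketch(fname, GridSize, precision, reshape=False):
    """Illustrative reader for a GridSize**3 binary grid."""
    import numpy as np

    dtype = np.float32 if precision == 1 else np.float64
    grid = np.fromfile(fname, dtype=dtype, count=GridSize**3)
    if reshape:
        grid = grid.reshape((GridSize, GridSize, GridSize))
    return grid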
def load_gals(max_snap, galaxy_name="test"): # First check that the output of the test run can be read. gal_name = "{0}/test_output/galaxies/{1}_z5.829".format( test_dir, galaxy_name) Gals, Gals_Desc = ReadScripts.ReadGals_SAGE(gal_name, 0, max_snap + 1) gal_name = "{0}/test_output/galaxies/{1}_MergedGalaxies".format( test_dir, galaxy_name) Gals_Merged, _ = ReadScripts.ReadGals_SAGE(gal_name, 0, max_snap + 1) Gals = ReadScripts.Join_Arrays(Gals, Gals_Merged, Gals_Desc) # Gals is now a recarray containing all galaxies at all snapshots. return Gals
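# ``load_gals`` returns a single recarray whose gridded fields are indexed as
# ``Gals.<FieldName>[galaxy, snapshot]`` (the same access pattern used in
# ``generate_data`` below).  A minimal illustrative helper, assuming the
# ``GridHistory`` and ``GridStellarMass`` fields referenced elsewhere in these
# scripts:
def select_alive_galaxies(Gals, snap):
    """Return the indices of galaxies that exist at snapshot ``snap``."""
    import numpy as np

    return np.where((Gals.GridHistory[:, snap] != -1) &
                    (Gals.GridStellarMass[:, snap] > 0.0))[0]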
def get_subfind_halos(path, snap): fname = "{0}_{1:03d}.catalog_subgroups_properties/subfind_{1:03d}.catalog_subgroups_properties.0" \ .format(path, snap) Halos = ReadScripts.read_subfind_halos(fname) return Halos
def calc_gal_zreion(GridPos, zreion_fname, GridSize, Precision, debug=0): zreion = ReadScripts.read_binary_grid(zreion_fname, GridSize, Precision, reshape=False) gal_zreion = zreion[GridPos] return gal_zreion
def calc_gal_photoion(GridPos, PhotoField_fname, GridSize, Precision, debug=0):

    photoion = ReadScripts.read_binary_grid(PhotoField_fname, GridSize,
                                            Precision, reshape=False)

    gal_photoion = photoion[GridPos]

    if debug:
        print("There are {0} cells with a non-zero photoionization rate and "
              "there are {1} unique galaxy cells".format(
                  len(photoion[photoion > 1e-16]), len(np.unique(GridPos))))

    return gal_photoion
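# Both ``calc_gal_zreion`` and ``calc_gal_photoion`` index a flat
# (reshape=False) grid with ``GridPos``, i.e. ``GridPos`` is assumed to be the
# 1D cell index of each galaxy within the GridSize**3 box.  A hypothetical
# sketch of that mapping (the row-major ordering here is an assumption, not
# something these scripts guarantee):
def cell_index_from_coords(i, j, k, GridSize):
    """Flatten integer cell coordinates (i, j, k) into a 1D grid index."""
    return (i * GridSize + j) * GridSize + k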
def determine_close_idx(fname_HII, fname_density, SnapList, GridSize, precision, target_XHI_fraction, model_tags): XHII_fraction = np.zeros_like(SnapList, dtype=np.float32) for model_number in range(len(fname_HII)): for snapnum in range(len(SnapList[model_number])): HII_fname = "{0}_{1:03d}".format(fname_HII[model_number], SnapList[model_number][snapnum]) HII = ReadScripts.read_binary_grid(HII_fname, GridSize[model_number], precision[model_number]) density_fname = "{0}{1:03d}.dens.dat".format(fname_density[model_number], SnapList[model_number][snapnum]) density = ReadScripts.read_binary_grid(density_fname, GridSize[model_number], precision[model_number]) HI_frac = calculate_HI_frac(HII, density) XHII_fraction[model_number][snapnum] = HI_frac SnapList = [] for model_number in range(len(fname_HII)): SnapList.append([]) print("Model {0}".format(model_tags[model_number])) for val in target_XHI_fraction: idx = (np.abs(XHII_fraction[model_number] - val)).argmin() print("HI Fract {0}: Nearest Idx {1} with value {2}".format(val, idx, XHII_fraction[model_number][idx])) SnapList[model_number].append(idx) return SnapList
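# The snapshot selection in ``determine_close_idx`` is a nearest-value search:
# for each target neutral fraction it keeps the snapshot whose global value is
# closest.  A tiny, self-contained illustration of that step with made-up
# numbers:
def nearest_snapshot(fractions, target):
    """Return the index of the value in ``fractions`` closest to ``target``."""
    import numpy as np

    return int(np.abs(np.asarray(fractions) - target).argmin())

# e.g. nearest_snapshot([0.95, 0.80, 0.52, 0.24, 0.05], 0.50) returns 2.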
def test_run():
    """
    Wrapper to run all the tests.

    Parameters
    ----------

    None.

    Returns
    ----------

    None.
    """

    print("")
    print("Welcome to the RSAGE testing funhouse!")
    print("")

    downloaded_repo = get_trees()  # Download the Kali trees if needed.

    # We have multiple test parameter specs we want to test.  We need the names
    # of the ini files and of the galaxies they produce.
    ini_files = ["kali512"]

    for ini_file in ini_files:

        # First read the ini file to get the runtime parameters.
        path_to_sage_ini = "{0}/test_ini_files/{1}_SAGE.ini".format(
            test_dir, ini_file)
        path_to_cifog_ini = "{0}/test_ini_files/{1}_cifog.ini".format(
            test_dir, ini_file)

        SAGE_params = ReadScripts.read_SAGE_ini(path_to_sage_ini)
        cifog_params, cifog_headers = ReadScripts.read_cifog_ini(
            path_to_cifog_ini, SAGE_params)

        run_prefix = SAGE_params["RunPrefix"]
        max_snap = int(SAGE_params["LastSnapShotNr"])

        # Make sure all the directories we need are present.
        # SAGE itself will take care of the directories for the results.
        check_sage_dirs(run_prefix)

        # Then run SAGE.
        run_my_sage(ini_file)

        print("")
        print("SAGE run, now reading in the Galaxies.")
        print("")

        # Read the results and check the stellar mass function.
        Gals = load_gals(max_snap, run_prefix)
        check_smf(Gals, run_prefix, max_snap, SAGE_params)

        # Read the grids and check.
        snapshot = 61
        nion_grid, XHII_grid, photHI_grid = read_grids(SAGE_params,
                                                       cifog_params,
                                                       snapshot)
        check_grids(nion_grid, XHII_grid, photHI_grid, SAGE_params,
                    cifog_params, snapshot)

        print("Done")
        print("")

    #cleanup(downloaded_repo)

    print("")
    print("================")
    print("All tests passed")
    print("================")
    print("")
def check_grids(nion_grid, XHII_grid, photHI_grid, SAGE_params, cifog_params,
                snapshot, tol=0.01, update_data=0):

    print("")
    print("Now checking that the nionHI, XHII and photHI grids match the test "
          "data.")

    RunPrefix = SAGE_params["RunPrefix"]
    nion_precision = int(cifog_params["nionFilesAreInDoublePrecision"]) + 1
    GridSize = int(SAGE_params["GridSize"])

    tags = ["nionHI", "XHII", "photHI"]
    grids = [nion_grid, XHII_grid, photHI_grid]
    precisions = [nion_precision, 2, 2]  # XHII and photHI hardcoded as double.

    if update_data:
        print("=======================================================")
        print("WARNING WARNING WARNING WARNING WARNING WARNING WARNING")
        print("=======================================================")
        print("=======================================================")
        print("WARNING WARNING WARNING WARNING WARNING WARNING WARNING")
        print("=======================================================")

        input("YOU ARE ABOUT TO OVERWRITE THE GRID TEST DATA. IF THIS IS WHAT "
              "YOU WANT, PRESS ENTER OTHERWISE CTRL-C TO GET OUTTA HERE!")
        input("JUST CHECKING ONCE MORE!")

        for (grid, tag, precision) in zip(grids, tags, precisions):
            fname = "{0}/{1}/kali512/data/{2}_test_{3}_{4:03d}"\
                    .format(test_dir, test_datadir, RunPrefix, tag, snapshot)

            # The grids were read as 1D arrays; save them as raw binary so
            # that ``read_binary_grid`` can read them back with the same
            # precision.
            dtype = np.float32 if precision == 1 else np.float64
            grid.astype(dtype).tofile(fname)

            print("Saved {0} grid data as {1}".format(tag, fname))

        print("All grid test data updated. Exiting checking now (because "
              "it'll obviously be correct.)")
        return

    # Now let's compare the grids to the test data.
    for (grid, tag, precision) in zip(grids, tags, precisions):
        fname = "{0}/{1}/kali512/data/{2}_test_{3}_{4:03d}"\
                .format(test_dir, test_datadir, RunPrefix, tag, snapshot)

        test_grid = ReadScripts.read_binary_grid(fname, GridSize, precision,
                                                 reshape=False)

        diff = np.abs(grid - test_grid)

        if len(diff[diff > 1e-8]) > 0:
            print("Found that the {0} grids disagreed.".format(tag))
            print("The SAGE dictionary is {0}".format(SAGE_params))
            print("The cifog dictionary is {0}".format(cifog_params))
            print("The mean value of the run grid is {0:.6e} compared to a "
                  "mean value of the test grid of {1:.6e}".format(
                      np.mean(grid), np.mean(test_grid)))
            print("The non-zero difference values are {0}".format(
                diff[diff > 0]))
            print("Checking if any of these values have a fractional "
                  "difference greater than {0}".format(tol))

            greater_0 = np.where(test_grid > 0.0)[0]
            fractional = diff[greater_0] / test_grid[greater_0]
            wrong_vals = greater_0[np.where(fractional > tol)[0]]

            if len(wrong_vals) > 0:
                print("Run values {0}".format(grid[wrong_vals]))
                print("Correct values {0}".format(test_grid[wrong_vals]))
                print("Diff {0}".format(diff[wrong_vals]))
                print("Fractional diff {0}".format(diff[wrong_vals] /
                                                   test_grid[wrong_vals]))
                raise ValueError

    print("Grids are checked and all correct!")
    print("")

    return
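# The comparison in ``check_grids`` only fails when a cell differs from the
# test data by more than ``tol`` in a fractional sense (absolute differences
# below 1e-8 are ignored outright).  A condensed sketch of that criterion,
# useful for checking a single pair of arrays in isolation:
def grids_agree(grid, test_grid, tol=0.01, abs_floor=1e-8):
    """Return True if no cell deviates fractionally by more than ``tol``."""
    import numpy as np

    diff = np.abs(grid - test_grid)
    if not np.any(diff > abs_floor):
        return True

    nonzero = test_grid > 0.0
    return not np.any(diff[nonzero] / test_grid[nonzero] > tol)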
Redshift = np.zeros_like(SnapList, dtype=np.float32) HI_fraction_target = [0.90, 0.75, 0.50, 0.25, 0.10] for snap in range(len(SnapList)): for inner_snap in range(len(SnapList[snap])): SnapList[snap][inner_snap] += 28 Redshift[snap][inner_snap] = AllVars.SnapZ[SnapList[snap] [inner_snap]] have_data = 1 if have_data == 1: plot_bubble_MC(Redshift, HI_fraction_target, model_tags, output_tags, GridSize, OutputDir, "Bubbles_photHI2") exit() for model_number in range(rank, len(fname_ionized), size): for snapnum in range(len(SnapList[model_number])): this_snapnum = SnapList[model_number][snapnum] XHII_fname = "{0}_{1:03d}".format(fname_ionized[model_number], this_snapnum) XHII = ReadScripts.read_binary_grid(XHII_fname, GridSize[model_number], precision[model_number]) calculate_bubble_MC(AllVars.SnapZ[this_snapnum], XHII, GridSize[model_number], OutputDir, output_tags[model_number])
def generate_data(rank, size, comm, ini_files, galaxy_plots):
    """
    Reads in the galaxy data and calculates all the properties required for
    each model.

    Parameters
    ----------

    rank : Integer
        This processor rank.

    size : Integer
        The total number of processors executing the pipeline.

    comm : Class ``mpi4py.MPI.Intracomm``
        The ``mpi4py`` communicator.

    ini_files : List of strings
        ``.ini`` file corresponding to each model that we're plotting.

    galaxy_plots : Dictionary
        Controls which of the plots we will make.  Keys are the name of each
        plot (e.g., ``SMF``) and the value specifies if we are plotting it.
        If a property is not being plotted, we skip calculating it.

    Returns
    ---------

    galaxy_data : Dictionary
        All of the calculated properties required to create the plots.
    """

    # Binning parameters for stellar mass.
    mstar_bin_low = 5.0
    mstar_bin_high = 12.0
    mstar_bin_width = 0.2
    mstar_Nbins = int((mstar_bin_high - mstar_bin_low) / mstar_bin_width)
    mstar_bins = np.arange(mstar_bin_low,
                           mstar_bin_high + mstar_bin_width,
                           mstar_bin_width)

    # Binning parameters for UV Magnitudes.
    MUV_bin_low = -24
    MUV_bin_high = 5
    MUV_bin_width = 0.5
    MUV_Nbins = int((MUV_bin_high - MUV_bin_low) / MUV_bin_width)
    MUV_bins = np.arange(MUV_bin_low,
                         MUV_bin_high + MUV_bin_width,
                         MUV_bin_width)

    # ======================================================================= #
    # We calculate values for all models and put them into lists that are     #
    # indexed by ``model_number``. So first we need to set up the outer-lists #
    # then we will append to these for each model.                            #
    # ======================================================================= #

    # General stuff for each model.
    z_array_full_allmodels = []
    lookback_array_full_allmodels = []

    z_array_reion_allmodels = []
    lookback_array_reion_allmodels = []

    cosmology_allmodels = []
    t_bigbang_allmodels = []

    # These are the arrays for the number of ionizing photons at each snapshot.
    # Note: This is the ESCAPING ionizing photons.
    sum_nion_allmodels = []

    # Escape fraction as a function of stellar mass (Mstar).
    mean_mstar_fesc_allmodels = []
    std_mstar_fesc_allmodels = []
    N_mstar_fesc_allmodels = []

    # Stellar mass function.
    SMF_allmodels = []

    # Ejected fraction as a function of stellar mass (Mstar).
    mean_mstar_fej_allmodels = []
    std_mstar_fej_allmodels = []
    N_mstar_fej_allmodels = []

    # Star formation rate as a function of stellar mass (Mstar).
    mean_mstar_SFR_allmodels = []
    std_mstar_SFR_allmodels = []
    N_mstar_SFR_allmodels = []

    # UV Magnitude Luminosity Function.
    UVLF_allmodels = []
    dustcorrected_UVLF_allmodels = []

    # Dust extinction (in dex) as a function of absolute UV magnitude.
    mean_MUV_A1600_allmodels = []
    std_MUV_A1600_allmodels = []
    N_MUV_A1600_allmodels = []

    # Dust mass as a function of absolute UV magnitude.
    mean_MUV_dustmass_allmodels = []
    std_MUV_dustmass_allmodels = []
    N_MUV_dustmass_allmodels = []

    # All outer arrays set up, time to read in the data!
    for model_number, ini_file in enumerate(ini_files):

        # Read in the parameters and set some initial variables.
SAGE_params = rs.read_SAGE_ini(ini_file) cosmology, t_bigbang = set_cosmology(float(SAGE_params["Hubble_h"]), float(SAGE_params["Omega"]), float(SAGE_params["BaryonFrac"])) cosmology_allmodels.append(cosmology) t_bigbang_allmodels.append(t_bigbang) first_snap = int(SAGE_params["LowSnap"]) last_snap = int(SAGE_params["LastSnapShotNr"]) GridSize = int(SAGE_params["GridSize"]) model_hubble_h = float(SAGE_params["Hubble_h"]) model_halopartcut = int(SAGE_params["HaloPartCut"]) model_dust_to_gas_ratio = galaxy_plots["dust_to_gas_ratio"][model_number] model_radius_dust_grains = galaxy_plots["radius_dust_grains"][model_number] model_density_dust_grains = galaxy_plots["density_dust_grains"][model_number] # Careful, volume is in Mpc^3. model_volume = pow(float(SAGE_params["BoxSize"]) / \ float(SAGE_params["Hubble_h"]),3) # Load the redshift file and calculate the lookback times. z_array_full, lookback_array_full = load_redshifts(SAGE_params["FileWithSnapList"], cosmology, t_bigbang) z_array_full = np.array(z_array_full[0:last_snap+1]) lookback_array_full = np.array(lookback_array_full[0:last_snap+1]) z_array_full_allmodels.append(z_array_full) lookback_array_full_allmodels.append(lookback_array_full) # Specifically set the redshift range to cover reionization. z_array_reion = np.array(z_array_full[first_snap:last_snap+1]) z_array_reion_allmodels.append(z_array_reion) lookback_array_reion = np.array(lookback_array_full[first_snap:last_snap+1]) lookback_array_reion_allmodels.append(lookback_array_reion) # Set up names for the galaxies. galaxy_name = "{0}/{1}_z{2:.3f}".format(SAGE_params["GalaxyOutputDir"], SAGE_params["RunPrefix"], z_array_reion[-1]) merged_name = "{0}/{1}_MergedGalaxies".format(SAGE_params["GalaxyOutputDir"], SAGE_params["RunPrefix"]) # Initialize the ionizing photon array to 0. sum_nion_allmodels.append(np.zeros(len(z_array_full), dtype=np.float32)) # Escape fraction as a function of stellar mass. mean_mstar_fesc_allmodels.append([]) std_mstar_fesc_allmodels.append([]) N_mstar_fesc_allmodels.append([]) for snap_count in range(len(z_array_full)): mean_mstar_fesc_allmodels[model_number].append(np.zeros(mstar_Nbins, dtype=np.float32)) std_mstar_fesc_allmodels[model_number].append(np.zeros(mstar_Nbins, dtype=np.float32)) N_mstar_fesc_allmodels[model_number].append(np.zeros(mstar_Nbins, dtype=np.float32)) # Stellar mass function. SMF_allmodels.append([]) for snap_count in range(len(z_array_full)): SMF_allmodels[model_number].append(np.zeros(mstar_Nbins, dtype=np.float32)) # Ejected fraction as a function of stellar mass. mean_mstar_fej_allmodels.append([]) std_mstar_fej_allmodels.append([]) N_mstar_fej_allmodels.append([]) for snap_count in range(len(z_array_full)): mean_mstar_fej_allmodels[model_number].append(np.zeros(mstar_Nbins, dtype=np.float32)) std_mstar_fej_allmodels[model_number].append(np.zeros(mstar_Nbins, dtype=np.float32)) N_mstar_fej_allmodels[model_number].append(np.zeros(mstar_Nbins, dtype=np.float32)) # Star formation rate as a function of stellar mass. mean_mstar_SFR_allmodels.append([]) std_mstar_SFR_allmodels.append([]) N_mstar_SFR_allmodels.append([]) for snap_count in range(len(z_array_full)): mean_mstar_SFR_allmodels[model_number].append(np.zeros(mstar_Nbins, dtype=np.float32)) std_mstar_SFR_allmodels[model_number].append(np.zeros(mstar_Nbins, dtype=np.float32)) N_mstar_SFR_allmodels[model_number].append(np.zeros(mstar_Nbins, dtype=np.float32)) # UV Luminosity Function. 
UVLF_allmodels.append([]) for snap_count in range(len(z_array_full)): UVLF_allmodels[model_number].append(np.zeros(MUV_Nbins, dtype=np.float32)) dustcorrected_UVLF_allmodels.append([]) for snap_count in range(len(z_array_full)): dustcorrected_UVLF_allmodels[model_number].append(np.zeros(MUV_Nbins, dtype=np.float32)) mean_MUV_A1600_allmodels.append([]) std_MUV_A1600_allmodels.append([]) N_MUV_A1600_allmodels.append([]) for snap_count in range(len(z_array_full)): mean_MUV_A1600_allmodels[model_number].append(np.zeros(MUV_Nbins, dtype=np.float32)) std_MUV_A1600_allmodels[model_number].append(np.zeros(MUV_Nbins, dtype=np.float32)) N_MUV_A1600_allmodels[model_number].append(np.zeros(MUV_Nbins, dtype=np.float32)) mean_MUV_dustmass_allmodels.append([]) std_MUV_dustmass_allmodels.append([]) N_MUV_dustmass_allmodels.append([]) for snap_count in range(len(z_array_full)): mean_MUV_dustmass_allmodels[model_number].append(np.zeros(MUV_Nbins, dtype=np.float32)) std_MUV_dustmass_allmodels[model_number].append(np.zeros(MUV_Nbins, dtype=np.float32)) N_MUV_dustmass_allmodels[model_number].append(np.zeros(MUV_Nbins, dtype=np.float32)) # Check to see if we're only using a subset of the files. if galaxy_plots["first_file"] is not None: first_file = galaxy_plots["first_file"] else: first_file = int(SAGE_params["FirstFile"]) if galaxy_plots["last_file"] is not None: last_file = galaxy_plots["last_file"] else: last_file = int(SAGE_params["LastFile"]) # ========================================================= # # Now go through each file and calculate the stuff we need. # # ========================================================= # # Parallelize over number of files. for fnr in range(first_file + rank, last_file + 1, size): print("Rank {0}: Model {1} File {2}".format(rank, model_number, fnr)) # Read in both the galaxies, the merged ones and combine them into # a single array. GG, Gal_Desc = rs.ReadGals_SAGE(galaxy_name, fnr, len(z_array_full)) G_Merged, _ = rs.ReadGals_SAGE(merged_name, fnr, len(z_array_full)) G = rs.Join_Arrays(GG, G_Merged, Gal_Desc) # For each snapshot, calculate properties for galaxies that exist. for snap_count, snapnum in enumerate(range(len(z_array_full))): Gals_exist = np.where((G.GridHistory[:, snapnum] != -1) & (G.GridStellarMass[:, snapnum] > 0.0) & (G.LenHistory[:, snapnum] > model_halopartcut))[0] if len(Gals_exist) == 0: continue sum_nion_allmodels[model_number][snap_count] += sum(G.GridNgamma_HI[Gals_exist, snapnum] * \ G.Gridfesc[Gals_exist,snapnum]) log_mass = np.log10(G.GridStellarMass[Gals_exist, snapnum] * 1.0e10 / model_hubble_h) fesc = G.Gridfesc[Gals_exist, snapnum] fej = G.EjectedFraction[Gals_exist, snapnum] SFR = G.GridSFR[Gals_exist, snapnum] MUV = G.GridMUV[Gals_exist, snapnum] halomass = G.GridHaloMass[Gals_exist, snapnum] * 1.0e10 / model_hubble_h dustmass = (G.GridDustColdGas[Gals_exist, snapnum] + G.GridDustColdGas[Gals_exist, snapnum]) * 1.0e10 / model_hubble_h # Calculate the mean fesc as a function of stellar mass. if galaxy_plots["mstar_fesc"]: mean_mstar_fesc_allmodels[model_number][snap_count], \ std_mstar_fesc_allmodels[model_number][snap_count], \ N_mstar_fesc_allmodels[model_number][snap_count] = \ do_2D_binning(log_mass, fesc, mean_mstar_fesc_allmodels[model_number][snap_count], std_mstar_fesc_allmodels[model_number][snap_count], N_mstar_fesc_allmodels[model_number][snap_count], mstar_bins) # Calculate the mean ejected fraction as a function of stellar mass. 
if galaxy_plots["mstar_fej"]: mean_mstar_fej_allmodels[model_number][snap_count], \ std_mstar_fej_allmodels[model_number][snap_count], \ N_mstar_fej_allmodels[model_number][snap_count] = \ do_2D_binning(log_mass, fej, mean_mstar_fej_allmodels[model_number][snap_count], std_mstar_fej_allmodels[model_number][snap_count], N_mstar_fej_allmodels[model_number][snap_count], mstar_bins) if galaxy_plots["mstar_SFR"]: mean_mstar_SFR_allmodels[model_number][snap_count], \ std_mstar_SFR_allmodels[model_number][snap_count], \ N_mstar_SFR_allmodels[model_number][snap_count] = \ do_2D_binning(log_mass, SFR, mean_mstar_SFR_allmodels[model_number][snap_count], std_mstar_SFR_allmodels[model_number][snap_count], N_mstar_SFR_allmodels[model_number][snap_count], mstar_bins) SMF_thissnap = np.histogram(log_mass, bins=mstar_bins) SMF_allmodels[model_number][snap_count] += SMF_thissnap[0] if galaxy_plots["UVLF"]: # For the UV Magnitude, galaxies without any UV Luminosity have # their UV Mag set to 999.0. Filter these out... w_MUV = np.where(MUV < 100.0)[0] my_MUV = MUV[w_MUV] dustcorrected_MUV = calculate_dustcorrected_MUV(my_MUV, halomass[w_MUV], dustmass[w_MUV], cosmology, model_dust_to_gas_ratio, model_radius_dust_grains, model_density_dust_grains, z_array_full[snapnum]) UVLF_thissnap = np.histogram(my_MUV, bins=MUV_bins) UVLF_allmodels[model_number][snap_count] += UVLF_thissnap[0] dustcorrected_UVLF_thissnap = np.histogram(dustcorrected_MUV, bins=MUV_bins) dustcorrected_UVLF_allmodels[model_number][snap_count] += dustcorrected_UVLF_thissnap[0] # To determine the amount of dust extinction (in dex) of each galaxy, # we'll just cheatingly do "dustcorrected_MUV - intrinsic_MUV". A1600 = dustcorrected_MUV - my_MUV mean_MUV_A1600_allmodels[model_number][snap_count], \ std_MUV_A1600_allmodels[model_number][snap_count], \ N_MUV_A1600_allmodels[model_number][snap_count] = \ do_2D_binning(my_MUV, A1600, mean_MUV_A1600_allmodels[model_number][snap_count], std_MUV_A1600_allmodels[model_number][snap_count], N_MUV_A1600_allmodels[model_number][snap_count], MUV_bins) # When determining the dustmass, only use those galaxies that have a # valid MUV. my_dustmass = dustmass[w_MUV] mean_MUV_dustmass_allmodels[model_number][snap_count], \ std_MUV_dustmass_allmodels[model_number][snap_count], \ N_MUV_dustmass_allmodels[model_number][snap_count] = \ do_2D_binning(my_MUV, my_dustmass, mean_MUV_dustmass_allmodels[model_number][snap_count], std_MUV_dustmass_allmodels[model_number][snap_count], N_MUV_dustmass_allmodels[model_number][snap_count], MUV_bins) #if(snap_count > 70): # print("z {0}: MUV_bins {1}\tUVLF {2}".format(z_array_full[snap_count], MUV_bins, UVLF_thissnap[0])) # Snapshot loop. # File Loop. # Model Loop. # Ionizing emissitivty is scaled by the simulation volume (in Mpc^3). sum_nion_allmodels[model_number] /= model_volume # Stellar Mass Function is normalized by boxsize and bin width. SMF_allmodels[model_number] = np.divide(SMF_allmodels[model_number], model_volume * mstar_bin_width) # As is the UV LF... UVLF_allmodels[model_number] = np.divide(UVLF_allmodels[model_number], model_volume * MUV_bin_width) dustcorrected_UVLF_allmodels[model_number] = np.divide(dustcorrected_UVLF_allmodels[model_number], model_volume * MUV_bin_width) # Everything has been calculated. Now construct a dictionary that contains # all the data (for easy passing) and return it. 
galaxy_data = {"z_array_full_allmodels" : z_array_full_allmodels, "lookback_array_full_allmodels" : lookback_array_full_allmodels, "z_array_reion_allmodels" : z_array_reion_allmodels, "lookback_array_reion_allmodels" : lookback_array_reion_allmodels, "cosmology_allmodels" : cosmology_allmodels, "t_bigbang_allmodels" : t_bigbang_allmodels, "sum_nion_allmodels" : sum_nion_allmodels, "mstar_bins" : mstar_bins, "mstar_bin_width" : mstar_bin_width, "mean_mstar_fesc_allmodels" : mean_mstar_fesc_allmodels, "std_mstar_fesc_allmodels" : std_mstar_fesc_allmodels, "N_mstar_fesc_allmodels" : N_mstar_fesc_allmodels, "SMF_allmodels" : SMF_allmodels, "mean_mstar_fej_allmodels" : mean_mstar_fej_allmodels, "std_mstar_fej_allmodels" : std_mstar_fej_allmodels, "N_mstar_fej_allmodels" : N_mstar_fej_allmodels, "mean_mstar_SFR_allmodels" : mean_mstar_SFR_allmodels, "std_mstar_SFR_allmodels" : std_mstar_SFR_allmodels, "N_mstar_SFR_allmodels" : N_mstar_SFR_allmodels, "UVLF_allmodels" : UVLF_allmodels, "dustcorrected_UVLF_allmodels" : dustcorrected_UVLF_allmodels, "mean_MUV_A1600_allmodels" : mean_MUV_A1600_allmodels, "std_MUV_A1600_allmodels" : std_MUV_A1600_allmodels, "N_MUV_A1600_allmodels" : N_MUV_A1600_allmodels, "mean_MUV_dustmass_allmodels" : mean_MUV_dustmass_allmodels, "std_MUV_dustmass_allmodels" : std_MUV_dustmass_allmodels, "N_MUV_dustmass_allmodels" : N_MUV_dustmass_allmodels, "MUV_bins" : MUV_bins, "MUV_bin_width" : MUV_bin_width} return galaxy_data
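# ``generate_data`` only fills the contribution of this rank (the
# parallelisation is over file numbers via ``range(first_file + rank,
# last_file + 1, size)``), so the per-model arrays still need to be combined
# across ranks before plotting.  Below is a minimal, hypothetical sketch of
# such a reduction for one summed quantity, assuming an mpi4py communicator
# like the ``comm`` argument above; it is not the pipeline's actual
# combination step.
def reduce_sum_array(comm, local_array, root=0):
    """Element-wise sum of ``local_array`` across all MPI ranks."""
    import numpy as np
    from mpi4py import MPI

    total = np.zeros_like(local_array)
    comm.Reduce(local_array, total, op=MPI.SUM, root=root)
    return total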
base_SAGE_ini = "{0}/ini_files/kali_SAGE.ini".format(script_dir)
base_cifog_ini = "{0}/ini_files/kali_cifog.ini".format(script_dir)

# Prompt for a SAGE ini path. If none provided, use the base.
my_SAGE_ini = input("Template SAGE ini file [default: "
                    "{0}]: ".format(base_SAGE_ini))
if not my_SAGE_ini:
    my_SAGE_ini = base_SAGE_ini

# Do the same for cifog.
my_cifog_ini = input("Template cifog ini file [default: "
                     "{0}]: ".format(base_cifog_ini))
if not my_cifog_ini:
    my_cifog_ini = base_cifog_ini

SAGE_params = rs.read_SAGE_ini(my_SAGE_ini)
cifog_params, cifog_headers = rs.read_cifog_ini(my_cifog_ini)

run_directory = None
while not run_directory:
    run_directory = input("Base output directory: ")
    if not run_directory:
        print("The output directory must be specified.")

SAGE_fields_update = update_SAGE_dict(SAGE_params)
cifog_fields_update = update_cifog_dict(cifog_params)

cp.create_directories(run_directory)
SAGE_fname, cifog_fname = cp.update_ini_files(base_SAGE_ini, base_cifog_ini,
                                              SAGE_fields_update,
                                              cifog_fields_update,
                                              run_directory)
def make_slurm_files(base_slurm_file, SAGE_ini_names, cifog_ini_names, run_directories, Nproc): """ Makes ``slurm`` files for each run. Parameters ---------- base_slurm_file : String Path to the template slurm file. SAGE_ini_names, cifog_ini_names : List of strings, length equal to number of runs Paths to the ini file created for each run. run_directories : List of strings, length equal to number of runs Path to the base ``RSAGE`` directory for each run where all the model output will be placed. Nproc : Integer Number of processors that each run will be executed with. Returns ---------- slurm_names : List of strings, length equal to number of runs Paths to the ``slurm`` file created for each run. """ slurm_names = [] for run_number in range(len(SAGE_ini_names)): SAGE_params = ReadScripts.read_SAGE_ini(SAGE_ini_names[run_number]) run_name = SAGE_params["FileNameGalaxies"] slurm_fname = "{0}/slurm_files/{1}.slurm".format(run_directories[run_number], run_name) tmp_slurm_fname = "{0}.tmp".format(base_slurm_file) copyfile(base_slurm_file, tmp_slurm_fname) # Want to replace lines in the slurm file. Set up the strings. job_name = "#SBATCH --job-name={0}".format(run_name) ntask = "#SBATCH --ntasks={0}".format(Nproc) NUMPROC = "NUMPROC={0}".format(Nproc) SAGE_ini = 'SAGE_ini="{0}"'.format(SAGE_ini_names[run_number]) cifog_ini = 'cifog_ini="{0}"'.format(cifog_ini_names[run_number]) run_prefix = 'run_prefix="{0}"'.format(run_name) path_to_log = 'path_to_log="{0}/log_files/{1}.log"'.format(run_directories[run_number], run_name) # Replace strings at specific line numbers. line_numbers = [2, 4, 17, 19, 20, 24, 25] string_names = [job_name, ntask, NUMPROC, SAGE_ini, cifog_ini, run_prefix, path_to_log] for line, name in zip(line_numbers, string_names): # Use @ as the delimiter here for sed. command = "sed -i '{0}s@.*@{1}@' {2} ".format(line, name, tmp_slurm_fname) subprocess.call(command, shell=True) # Finally move the temporary file to the final location. command = "mv {0} {1}".format(tmp_slurm_fname, slurm_fname) subprocess.call(command, shell=True) print("Created {0}".format(slurm_fname)) slurm_names.append(slurm_fname) return slurm_names
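# For reference, the loop in ``make_slurm_files`` generates commands of the
# form (values here are purely illustrative):
#
#     sed -i '2s@.*@#SBATCH --job-name=myrun@' /path/to/base.slurm.tmp
#
# i.e. the given line of the temporary slurm file is replaced wholesale with
# the new directive; ``@`` is used as the sed delimiter because the
# replacement strings contain ``/`` characters from the file paths.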
def update_ini_files(base_SAGE_ini, base_cifog_ini,
                     SAGE_fields_update, cifog_fields_update,
                     run_directory):
    """
    Using template ini files for ``SAGE`` and ``cifog``, creates new ones with
    the directory paths and field values updated.

    Parameters
    ----------

    base_SAGE_ini, base_cifog_ini : Strings
        Paths to the template SAGE and cifog ini files.

    SAGE_fields_update, cifog_fields_update : Dictionaries
        Fields that will be updated and their new values.

    run_directory : String
        Path to the base ``RSAGE`` directory.

    Returns
    ----------

    SAGE_fname, cifog_fname : Strings
        Names of the newly created ``SAGE`` and ``cifog`` ini files.
    """

    SAGE_params = ReadScripts.read_SAGE_ini(base_SAGE_ini)
    cifog_params, cifog_headers = ReadScripts.read_cifog_ini(base_cifog_ini)

    # This is the outermost directory.
    SAGE_params["OutputDir"] = "{0}".format(run_directory)

    # Within RSAGE, we use values of `None` to signify that RSAGE should
    # determine the paths at runtime.
    SAGE_params["GalaxyOutputDir"] = "None"
    SAGE_params["GridOutputDir"] = "None"
    SAGE_params["PhotoionDir"] = "None"
    SAGE_params["PhotoionName"] = "None"
    SAGE_params["ReionRedshiftName"] = "None"

    cifog_params["inputNionFile"] = "None"
    cifog_params["output_XHII_file"] = "None"
    cifog_params["output_photHI_file"] = "None"
    cifog_params["output_restart_file"] = "None"

    # Now go through the parameters and update them.
    for name in SAGE_fields_update:
        SAGE_params[name] = SAGE_fields_update[name]

    for name in cifog_fields_update:
        cifog_params[name] = cifog_fields_update[name]

    # The unique identifier amongst each run will be `FileNameGalaxies`.
    prefix_tag = SAGE_params["FileNameGalaxies"]

    # Write out the new ini files, using `FileNameGalaxies` as the tag.
    SAGE_fname = "{0}/ini_files/{1}_SAGE.ini".format(run_directory, prefix_tag)
    cifog_fname = "{0}/ini_files/{1}_cifog.ini".format(run_directory,
                                                       prefix_tag)

    with open(SAGE_fname, "w+") as f:
        for name in SAGE_params.keys():
            string = "{0} {1}\n".format(name, SAGE_params[name])
            f.write(string)

    with open(cifog_fname, "w+") as f:
        for name in cifog_params.keys():
            if name in cifog_headers.keys():
                string = "{0}\n".format(cifog_headers[name])
                f.write(string)

            string = "{0} = {1}\n".format(name, cifog_params[name])
            f.write(string)

    return SAGE_fname, cifog_fname
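# A minimal usage sketch for ``update_ini_files`` (the paths and the field
# values other than ``FileNameGalaxies`` are hypothetical placeholders, and
# ``create_directories`` is the companion helper used elsewhere in these
# scripts):
#
#     SAGE_updates = {"FileNameGalaxies": "fiducial"}
#     cifog_updates = {}
#
#     create_directories("/scratch/user/rsage_runs/fiducial")
#     SAGE_fname, cifog_fname = update_ini_files(
#         "ini_files/kali_SAGE.ini", "ini_files/kali_cifog.ini",
#         SAGE_updates, cifog_updates,
#         "/scratch/user/rsage_runs/fiducial")
#
# The returned paths point at the newly written ini files inside the run
# directory's ``ini_files`` subdirectory.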
import PlotScripts import ReadScripts import AllVars matplotlib.rcdefaults() plt.rc('text', usetex=True) output_format = '.png' if __name__ == '__main__': gal_filepath = "/home/jseiler/self_consistent_SAGE/tests/test_output/galaxies/kali_test_z5.782" merged_gal_filepath = "/home/jseiler/self_consistent_SAGE/tests/test_output/galaxies/kali_test_MergedGalaxies" snap = 77 GG, Gal_Desc = ReadScripts.ReadGals_SAGE(gal_filepath, 0, 99) # Read galaxies G_Merged, _ = ReadScripts.ReadGals_SAGE(merged_gal_filepath, 0, 99) G = ReadScripts.Join_Arrays( GG, G_Merged, Gal_Desc) # Then join them together for all galaxies. w = np.where((G.GridHistory[:,snap] != -1) & \ (G.GridStellarMass[:,snap] > 0.0))[0] w_wrong = w[np.where(G.GridNgamma_HI[w, snap] == 0)[0]] w_right = w[np.where(G.GridNgamma_HI[w, snap] > 0)[0]] print("There were {0} galaxies at snapshot {1}. Of these, {2} had an " "Ngamma value of 0.".format(len(w), snap, len(w_wrong))) no_sat = np.zeros(len(w_wrong))