def loadMergerEnvironments(run, loadsave=True, verbose=True, version=_VERSION):
    """Load all subhalo environment data as a dictionary with keys from ``ENVIRON``.

    NOTE: the 'env_in' dictionary was created using `_in_merger_environments()` (i.e.
    manually), and might not be recreated appropriately by `_collectMergerEnvironments()`.

    Arguments
    ---------
    run      <int>  : illustris simulation run number, {1, 3}
    loadsave <bool> : optional, load existing save if it exists, otherwise create new
    verbose  <bool> : optional, print verbose output
    version  <flt>  : optional, version number to load (can only create current version!)

    Returns
    -------
    env_out <dict> : environment data for all merger subhalos, keys given by ``ENVIRON`` class
    env_in  <dict> : corresponding data created by `_in_merger_environments()` (see NOTE above)

    """
    if verbose: print(" - - Environments.loadMergerEnvironments()")

    fname_out = _GET_MERGER_ENVIRONMENT_FILENAME(run, version=version)
    fname_in = zio.modify_filename(fname_out, append='_in')

    # Try to Load Existing Save File
    # ------------------------------
    if loadsave:
        if verbose:
            print(" - - Attempting to load saved file from '{}' and '{}'".format(
                fname_out, fname_in))
        # Both files are needed; missing either means a rebuild
        if os.path.exists(fname_out) and os.path.exists(fname_in):
            env_out = zio.npzToDict(fname_out)
            env_in = zio.npzToDict(fname_in)
            if verbose: print(" - - - Loaded.")
        else:
            print(" - - - File '{}' or '{}' does not exist!".format(fname_out, fname_in))
            loadsave = False

    # Import environment data directly, and save
    # ------------------------------------------
    if not loadsave:
        if verbose:
            print(" - - Importing Merger Environments, version %s" % (str(_VERSION)))
        env_out, env_in = _collectMergerEnvironments(run, verbose=verbose, version=version)
        zio.dictToNPZ(env_out, fname_out, verbose=True)
        zio.dictToNPZ(env_in, fname_in, verbose=True)

    return env_out, env_in
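# NOTE: a minimal usage sketch for `loadMergerEnvironments` (hypothetical driver, not
# part of the original pipeline; assumes run 1 data and this module's `zio`/path setup).
def _example_load_merger_environments():
    env_out, env_in = loadMergerEnvironments(1, loadsave=True, verbose=False)
    rads = env_out[ENVIRON.RADS]    # radial bin positions
    dens = env_out[ENVIRON.DENS]    # density profiles in those bins
    return rads, dens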
def _checkLoadSave(fname, loadsave, log):
    """See if a file exists and can be loaded, or if it needs to be reconstructed.
    """
    log.debug("_checkLoadSave()")
    log.debug(" - Checking for file '%s'" % (fname))
    data = None
    if os.path.exists(fname):
        logStr = " - File exists..."
        if not loadsave:
            logStr += " not loading it."
            log.debug(logStr)
        else:
            logStr += " loading..."
            log.debug(logStr)
            try:
                data = zio.npzToDict(fname)
            except Exception as err:
                log.warning(" - Load Failed: %s." % (str(err)))
            else:
                log.debug(" - Loaded data.")
    else:
        log.debug(" - File does not exist.")

    return data
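# NOTE: a sketch of the intended load-or-rebuild pattern around `_checkLoadSave`
# (hypothetical caller; `_buildData` is a placeholder for the real constructor).
def _example_check_load_save(fname, log):
    data = _checkLoadSave(fname, True, log)
    if data is None:
        # File missing or failed to load: reconstruct and save it for next time.
        data = _buildData()             # placeholder, not defined in this module
        zio.dictToNPZ(data, fname)
    return data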
def loadTree(run, mrgs=None, loadsave=True, verbose=True):
    """Load tree data from save file if possible, or recalculate directly.

    Arguments
    ---------
    run      : <int>, Illustris run number {1, 3}
    mrgs     : <dict>, (optional=None), BH Merger data, reloaded if not provided
    loadsave : <bool>, (optional=True), try to load tree data from previous save
    verbose  : <bool>, (optional=True), print verbose output

    Returns
    -------
    tree : <dict>, container for tree data - see BHTree doc

    """
    if verbose: print(" - - BHTree.loadTree()")
    fname = constants.GET_BLACKHOLE_TREE_FILENAME(run, VERSION)

    # Reload existing BH Merger Tree
    # ------------------------------
    if loadsave:
        if verbose: print(" - - - Loading save file '{:s}'".format(fname))
        if os.path.exists(fname):
            tree = zio.npzToDict(fname)
            if verbose: print(" - - - - Tree loaded")
        else:
            loadsave = False
            warnStr = "File '%s' does not exist!" % (fname)
            warnings.warn(warnStr, RuntimeWarning)

    # Recreate BH Merger Tree
    # -----------------------
    if not loadsave:
        if verbose: print(" - - - Reconstructing BH Merger Tree")
        # Load Mergers if needed
        if mrgs is None:
            from illpy_lib.illbh import mergers
            mrgs = mergers.load_fixed_mergers(run)
            if verbose: print(" - - - - Loaded {:d} mergers".format(mrgs[MERGERS.NUM]))

        # Construct Tree
        if verbose: print(" - - - - Constructing Tree")
        tree = _constructBHTree(run, mrgs, verbose=verbose)

        # Analyze Tree Data, store meta-data to tree dictionary
        timeBetween, numPast, numFuture = analyzeTree(tree, verbose=verbose)

        # Save Tree data
        zio.dictToNPZ(tree, fname, verbose=True)

    return tree
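# NOTE: example sketch (not part of the original module).  Loads the merger tree for
# run 1, reusing an already-loaded merger dictionary to avoid reloading it inside
# `loadTree`; the import path is the same one used above.
def _example_load_tree():
    from illpy_lib.illbh import mergers
    mrgs = mergers.load_fixed_mergers(1)
    tree = loadTree(1, mrgs=mrgs, loadsave=True, verbose=False)
    return tree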
def loadAllUniqueIDs(run=Settings.run, loadsave=True, log=None, sets=None):
    """Load (or calculate and merge) the unique BH IDs from all snapshots of a run.
    """
    if sets is not None:
        run = sets.run

    log = _checkLog(log, run=run)
    log.debug("loadAllUniqueIDs()")

    from mpi4py import MPI
    comm = MPI.COMM_WORLD
    rank = comm.rank

    fname = bh_constants.GET_DETAILS_ALL_UNIQUE_IDS_FILENAME(run, __version__)
    log.debug(" - Filename '%s'" % (fname))
    if os.path.exists(fname):
        log.debug(" - - File exists.")
        if loadsave:
            log.debug(" - - Loading.")
            data = zio.npzToDict(fname)
        else:
            log.debug(" - - Not loading.")
    else:
        log.debug(" - File does not exist.")
        loadsave = False

    if not loadsave:
        # Calculate the unique IDs for each snapshot
        _calculateAllUniqueIDs(run, loadsave=False, log=log)

        # Merge unique IDs from each snapshot (root rank only)
        if rank == 0:
            snaps, ids, scales = _mergeAllUnique(run, log)
            # Save data
            data = _saveUnique(run, snaps, fname, ids, scales, log)

    # Only the root rank holds the merged data; all other ranks return `None`
    if rank == 0:
        return data
    return None
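# NOTE: sketch of how `loadAllUniqueIDs` might be driven under MPI (hypothetical;
# `_calculateAllUniqueIDs` runs on all ranks while only rank 0 merges and saves).
def _example_load_unique_ids(run=1):
    data = loadAllUniqueIDs(run=run)
    if data is not None:    # true only on the root rank
        print("Unique BH IDs loaded on root rank.")
    return data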
def _loadAndCheckEnv(fname, rads, lenTypeExp, warn=False, care=True):
    """Load merger-subhalo environment file and perform consistency checks on its contents.

    Compares the total number of particles loaded of each type to that expected (from the
    group catalog).  Discrepancies are allowed (but a Warning is issued) because some
    particles might not have fit in the range of bin radii.  If *all* of the particles of a
    given type are missing, however, it is assumed that something is wrong (this happens
    for some reason...).

    Also compares the positions of the radial bins in the loaded file with those expected.

    Arguments
    ---------
    fname      <str>    : filename to load from
    rads       <flt>[N] : positions of radial bins
    lenTypeExp <int>[M] : number of expected particles of each type (from group catalog)
    warn       <bool>   : optional, print optional warning messages on errors
    care       <bool>   : optional, return a failure status more easily

    Returns
    -------
    dat  <dict> : merger-subhalo environment data
    stat <bool> : success/good (``True``) or failure/bad (``False``)

    """
    # Load Merger-Subhalo Environment Data
    dat = zio.npzToDict(fname)
    # Assume good
    stat = True

    # Compare particle counts
    lenTypesAct = np.sum(dat[ENVIRON.NUMS], axis=1)
    # It's possible that counts *shouldn't* match... b/c particles may lie outside the bins
    if not np.all(lenTypesAct == lenTypeExp):
        gcatStr = ', '.join(['{:d}'.format(int(num)) for num in lenTypeExp])
        datStr = ', '.join(['{:d}'.format(num) for num in lenTypesAct])
        # Always warn for this
        warnStr = "Numbers mismatch in '%s'" % (fname)
        warnStr += "\nGcat = '%s', dat = '%s'" % (gcatStr, datStr)
        warnings.warn(warnStr, RuntimeWarning)
        if care:
            stat = False
        else:
            # See if all particles of any type are unexpectedly missing
            for ii in range(2):
                if lenTypesAct[ii] == 0 and lenTypeExp[ii] != 0:
                    # Set as bad
                    stat = False
                    # Send Warning
                    if warn:
                        warnStr = "All particles of type %d are missing in data!" % (ii)
                        warnStr += "\nFilename '%s'" % (fname)
                        warnings.warn(warnStr, RuntimeWarning)

    # Make Sure Radii Match
    if not np.all(rads == dat[ENVIRON.RADS]):
        stat = False
        if warn:
            warnStr = "Radii mismatch!"
            warnStr += "\nFilename '%s'" % (fname)
            warnings.warn(warnStr, RuntimeWarning)

    return dat, stat
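# NOTE: example sketch of using the status flag from `_loadAndCheckEnv` to filter out
# bad environment files (hypothetical `fnames`, `rads`, `lenTypeExp` arguments).
def _example_filter_envs(fnames, rads, lenTypeExp):
    good = []
    for fn in fnames:
        dat, stat = _loadAndCheckEnv(fn, rads, lenTypeExp, warn=True, care=True)
        if stat:
            good.append(dat)
    return good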
def _loadSingleMergerEnv(run, snap, subhalo, boundID=None, radBins=None,
                         loadsave=True, verbose=False):
    """Import and save merger-subhalo environment data.

    Arguments
    ---------
    run      <int>    : illustris simulation number {1, 3}
    snap     <int>    : illustris snapshot number {0, 135}
    subhalo  <int>    : subhalo index number for this snapshot
    boundID  <int>    : ID of this subhalo's most-bound particle
    radBins  <flt>[N] : optional, positions of radial bins for creating profiles
    loadsave <bool>   : optional, load existing save file if possible
    verbose  <bool>   : optional, print verbose output

    Returns
    -------
    env     <dict> : loaded dictionary of environment data
    retStat <int>  : ``_ENVSTAT`` value for status of this environment

    """
    if verbose: print(" - - Environments._loadSingleMergerEnv()")

    fname = _GET_MERGER_SUBHALO_FILENAME(run, snap, subhalo)
    if verbose: print(" - - - Filename '%s'" % (fname))

    # If we shouldn't or can't load existing save, reload profiles
    if not loadsave or not os.path.exists(fname):

        # Load Radial Profiles
        radProfs = Profiler.subhaloRadialProfiles(
            run, snap, subhalo, radBins=radBins, mostBound=boundID, verbose=verbose)

        # Invalid profiles on failure
        if radProfs is None:
            warnStr = "INVALID PROFILES at Run %d, Snap %d, Subhalo %d, Bound ID %s" \
                % (run, snap, subhalo, str(boundID))
            warnings.warn(warnStr, RuntimeWarning)
            # Set return status to failure
            retStat = _ENVSTAT.FAIL
            env = None

        # Valid profiles
        else:
            # Unpack data
            outRadBins, posRef, retBoundID, partTypes, partNames, numsBins, \
                massBins, densBins, potsBins, dispBins = radProfs

            if boundID is not None and retBoundID != boundID:
                warnStr = "Run %d, Snap %d, Subhalo %d" % (run, snap, subhalo)
                warnStr += "\nSent BoundID = %d, Returned = %d!" % (boundID, retBoundID)
                warnings.warn(warnStr, RuntimeWarning)

            # Build dict of data
            env = {
                ENVIRON.RUN: run,
                ENVIRON.SNAP: snap,
                ENVIRON.VERS: _VERSION,
                ENVIRON.DATE: datetime.now().ctime(),
                ENVIRON.TYPE: partTypes,
                ENVIRON.NAME: partNames,
                ENVIRON.SUBH: subhalo,
                ENVIRON.BPID: retBoundID,
                ENVIRON.CENT: posRef,
                ENVIRON.RADS: outRadBins,
                ENVIRON.NUMS: numsBins,
                ENVIRON.DENS: densBins,
                ENVIRON.MASS: massBins,
                ENVIRON.POTS: potsBins,
                ENVIRON.DISP: dispBins
            }

            # Save Data as NPZ file
            zio.dictToNPZ(env, fname, verbose=verbose)
            # Set return status to new file created
            retStat = _ENVSTAT.NEWF

    # File already exists
    else:
        # Load data from save file
        env = zio.npzToDict(fname)
        if verbose:
            print(" - - - File already exists for Run %d, Snap %d, Subhalo %d"
                  % (run, snap, subhalo))
        # Set return status to file already exists
        retStat = _ENVSTAT.EXST

    return env, retStat
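# NOTE: example sketch of handling the ``_ENVSTAT`` return codes from above
# (hypothetical driver; the three statuses are the ones set in `_loadSingleMergerEnv`).
def _example_single_env(run, snap, subhalo):
    env, stat = _loadSingleMergerEnv(run, snap, subhalo, loadsave=True)
    if stat == _ENVSTAT.FAIL:
        print("Profiles invalid; no environment produced.")
    elif stat == _ENVSTAT.NEWF:
        print("New environment file created.")
    else:   # _ENVSTAT.EXST
        print("Loaded existing environment file.")
    return env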
def _loadSingleSnapshotBHs(run, snapNum, numMergers, idxs, bhids, logger, rank=0, loadsave=True):
    """Load the data for BHs in a single snapshot, save to npz file.

    If no indices (``idxs``) or BH IDs (``bhids``) are given, or this is a 'bad' snapshot,
    then it isn't actually loaded and processed.  An NPZ file with all zero entries is
    still produced.

    Arguments
    ---------
    run : int,
        Illustris simulation number {1, 3}.
    snapNum : int,
        Illustris snapshot number {1, 135}.
    numMergers : int,
        Total number of mergers.
    idxs : (N,) array of int,
        Merger indices for this snapshot with `N` mergers.
    bhids : (N,2,) array of int,
        BH ID numbers, `IN` and `OUT` BH for each merger.
    logger : ``logging.Logger`` object,
        Object for logging.
    rank : int,
        Rank of this processor (used for logging).
    loadsave : bool,
        Load existing save if it exists.

    Returns
    -------
    data : dict,
        Data for this snapshot.
    pos : int,
        Number of BHs successfully found.
    neg : int,
        Number of BHs failed to be found.
    new : int,
        `+1` if a new file was created, otherwise `0`.

    """
    import illpy as ill

    logger.warning("BHSnapshotData._loadSingleSnapshotBHs()")
    illdir = GET_ILLUSTRIS_OUTPUT_DIR(run)
    fname = _GET_BH_SINGLE_SNAPSHOT_FILENAME(run, snapNum)
    logger.warning("Snap %d, filename '%s'" % (snapNum, fname))

    pos = 0
    neg = 0
    new = 0

    # Load and Return existing save if desired
    # ----------------------------------------
    if loadsave and os.path.exists(fname):
        logger.warning("Loading existing file")
        data = zio.npzToDict(fname)
        return data, pos, neg, new

    # Initialize dictionary of results
    # --------------------------------
    logger.info("Initializing storage")
    data = _initStorage(numMergers)
    for index, tid in zip(idxs, bhids):
        for BH in [BH_TYPE.IN, BH_TYPE.OUT]:
            data[BH_SNAP.TARGET][index, BH] = tid[BH]

    # Decide if this is a valid Snapshot
    # ----------------------------------
    process_snapshot = True
    # Some illustris-1 snapshots are bad
    if snapNum in GET_BAD_SNAPS(run):
        logger.warning("Skipping bad snapshot.")
        process_snapshot = False

    # Make sure there are mergers in this snapshot
    if len(idxs) <= 0 or len(bhids) <= 0:
        logger.warning("Skipping snap %d with no valid BHs" % (snapNum))
        process_snapshot = False

    # Load And Process Snapshot if it's good
    # --------------------------------------
    if process_snapshot:
        logger.info("Processing Snapshot")

        # Load Snapshot
        # -------------
        logger.debug("- Loading snapshot %d" % (snapNum))
        with zio.StreamCapture():
            snapshot = ill.snapshot.loadSubset(illdir, snapNum, 'bh', fields=SNAPSHOT_FIELDS)

        snap_keys = list(snapshot.keys())
        if 'count' in snap_keys:
            snap_keys.remove('count')
        logger.debug("- - Loaded %d particles" % (snapshot['count']))

        # Make sure all target keys are present
        union = list(set(snap_keys) & set(SNAPSHOT_FIELDS))
        if len(union) != len(SNAPSHOT_FIELDS):
            logger.error("snap_keys = '%s'" % (str(snap_keys)))
            logger.error("SNAPSHOT_FIELDS = '%s'" % (str(SNAPSHOT_FIELDS)))
            errStr = "Field mismatch at Rank %d, Snap %d!" % (rank, snapNum)
            # NOTE: relies on a module-level MPI communicator ``comm`` (assumption;
            #       it is not defined within this function)
            zio._mpiError(comm, log=logger, err=errStr)

        # Match target BHs
        # ----------------
        logger.debug("- Matching %d BH Mergers" % (len(bhids)))
        for index, tid in zip(idxs, bhids):
            for BH in [BH_TYPE.IN, BH_TYPE.OUT]:
                ind = np.where(snapshot['ParticleIDs'] == tid[BH])[0]
                if len(ind) == 1:
                    pos += 1
                    data[BH_SNAP.VALID][index, BH] = True
                    for key in SNAPSHOT_FIELDS:
                        data[key][index, BH] = snapshot[key][ind[0]]
                else:
                    neg += 1

        logger.debug("- Processed, pos %d, neg %d" % (pos, neg))

    # Add Metadata and Save File
    # ==========================
    logger.debug("Adding metadata to dictionary")
    data[BH_SNAP.RUN] = run
    data[BH_SNAP.SNAP] = snapNum
    data[BH_SNAP.VERSION] = _VERSION
    data[BH_SNAP.CREATED] = datetime.now().ctime()
    data[BH_SNAP.DIR_SRC] = illdir
    data[BH_SNAP.FIELDS] = SNAPSHOT_FIELDS
    data[BH_SNAP.DTYPES] = SNAPSHOT_DTYPES

    logger.info("Saving data to '%s'" % (fname))
    zio.dictToNPZ(data, fname)
    new = 1

    return data, pos, neg, new
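# NOTE: example sketch tallying the per-snapshot match statistics returned above
# (hypothetical driver; assumes `idxs`/`bhids` were built elsewhere for this snapshot).
def _example_snapshot_bhs(run, snapNum, numMergers, idxs, bhids, logger):
    data, pos, neg, new = _loadSingleSnapshotBHs(run, snapNum, numMergers, idxs, bhids, logger)
    logger.info("Snap %d: matched %d BHs, missed %d (%s file)"
                % (snapNum, pos, neg, "new" if new else "existing"))
    return data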
def loadBHSnapshotData(run, version=None, loadsave=True, verbose=False, logger=None):
    """Load an existing BH Snapshot data save file, or attempt to recreate it (slow!).

    If the data is recreated (using ``_mergeBHSnapshotFiles``), it will be saved to an npz
    file.  The loaded parameters are stored to a dictionary with keys given by the
    parameters in ``bh_constants.BH_SNAP``.

    Arguments
    ---------
    run : int,
        Illustris run number {1, 3}.
    version : flt,
        Version number to load/save.
    loadsave : bool,
        If `True`, attempt to load an existing save.
    verbose : bool,
        Print verbose output.
    logger : ``logging.Logger`` object,
        Object to use for logging output messages.

    Returns
    -------
    data : dict,
        Dictionary of BH Snapshot data.  Keys are given by the entries to
        ``bh_constants.BH_SNAP``.

    """
    # Create default logger if needed
    # -------------------------------
    if not isinstance(logger, logging.Logger):
        logger = zio.default_logger(logger, verbose=verbose)

    logger.debug("BHSnapshotData.loadBHSnapshotData()")
    if version is None:
        version = _VERSION

    oldVers = False
    # Warn if attempting to use an old version number
    if version != _VERSION:
        oldVers = True
        logger.warning("WARNING: loading v%.2f behind current v%.2f" % (version, _VERSION))

    # Get save filename
    fname = _GET_BH_SNAPSHOT_FILENAME(run, version=version)

    # Load Existing File
    # ------------------
    if loadsave:
        logger.info("Loading from '%s'" % (fname))
        if os.path.exists(fname):
            data = zio.npzToDict(fname)
        else:
            logger.warning("WARNING: '%s' does not exist!  Recreating!" % (fname))
            loadsave = False

    # Recreate data (Merge individual snapshot files)
    # -----------------------------------------------
    if not loadsave:
        logger.info("Recreating '%s'" % (fname))
        # Don't allow old versions to be recreated
        if oldVers:
            raise RuntimeError("Cannot recreate outdated version %.2f!!" % (version))

        data = _mergeBHSnapshotFiles(run, logger)

        # Add Metadata
        logger.debug("Adding metadata")
        data[BH_SNAP.RUN] = run
        data[BH_SNAP.VERSION] = _VERSION
        data[BH_SNAP.CREATED] = datetime.now().ctime()
        data[BH_SNAP.FIELDS] = SNAPSHOT_FIELDS
        data[BH_SNAP.DTYPES] = SNAPSHOT_DTYPES

        # Save
        logger.debug("Saving")
        zio.dictToNPZ(data, fname)
        logger.info("Saved to '%s'" % (fname))

    return data
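# NOTE: example sketch (not part of the original module).  Loads the merged snapshot
# data and masks to the successfully-matched BH entries via `BH_SNAP.VALID`.  The
# 'Masses' field name is an assumption about the contents of `SNAPSHOT_FIELDS`.
def _example_bh_snapshot_data(run=1):
    data = loadBHSnapshotData(run, loadsave=True)
    valid = data[BH_SNAP.VALID]         # (numMergers, 2) boolean mask of found BHs
    masses = data['Masses'][valid]      # assumed field; swap for any SNAPSHOT_FIELDS key
    return masses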
def loadDistFuncs(sets, log=None):
    """Load precalculated distribution functions from a save file as spline functions.

    Arguments
    ---------
    sets : `Settings` object
        Run settings; provides the run number, filenames, and ``DIST_FUNC_VALID_FRAC``
        (float or `None`): the required fraction of a galaxy's distribution-function
        values that must be valid (> 0.0) for the galaxy as a whole to be considered
        valid.  `None` means no requirement.
    log : `logging.Logger` object

    Returns
    -------
    valid : ndarray of bool, shape (N,)
        Flag describing whether each galaxy is valid or not.  `N` is the number of
        galaxies.
    dfs : ndarray of callable, scalar functions; shape (N,)
        Log-splines of distribution functions for each galaxy.  Invalid galaxies are
        given `constants._zeroFunc`, which returns ndarrays of 0.0.

    """
    if log is None:
        log = constants.load_logger(True, False, __file__)
    log.debug("loadDistFuncs()")
    validFrac = sets.DIST_FUNC_VALID_FRAC

    # Load Data
    # ---------
    fname = sets.GET_DIST_FUNC_FILENAME(vers=__version__)
    if not os.path.exists(fname):
        errStr = "File '%s' does not exist!" % (fname)
        log.error(errStr)
        log.error("Run ``%s`` to generate the distribution functions!" % (__file__))
        raise RuntimeError(errStr)

    dfData = zio.npzToDict(fname)
    dfRun = dfData['run']
    log.debug("Loaded distribution function data from '%s', run %d" % (fname, dfRun))
    assert sets.RUN == dfRun, "Run numbers do not match!"

    eps = dfData['eps']
    dist_funcs = dfData['distfuncs']
    numMergers, numEners = np.shape(eps)
    log.debug(" - %d Mergers, with %d Energies" % (numMergers, numEners))

    # Determine `valid` galaxies
    # --------------------------
    # Initially `valid` are those with any nonzero entries
    valid = np.any(dist_funcs != 0.0, axis=1)
    numVal = np.count_nonzero(valid)
    frac = 1.0 * numVal / numMergers
    log.info("Loaded %d/%d = %.4f nonzero distribution functions."
             % (numVal, numMergers, frac))

    # Find galaxies with non-finite values, set to invalid
    bad_map = ~np.isfinite(dist_funcs)
    num_bad = np.count_nonzero(bad_map, axis=1)
    num_gals_with_bad = np.count_nonzero(num_bad[valid])
    frac = num_gals_with_bad / numVal
    log.info("%d/%d = %.4f Galaxies have non-finite values"
             % (num_gals_with_bad, numVal, frac))
    ave_bad = num_bad[valid].mean()
    log.debug("Average number of non-finite values = %.2f" % (ave_bad))
    valid[num_bad > 0] = False

    # Find bad entries in valid galaxies
    bad_map = (dist_funcs <= 0.0)
    num_bad = np.count_nonzero(bad_map, axis=1)
    ave_bad = num_bad[valid].mean()
    log.debug("Average number of bad values in valid galaxies = %.2f" % (ave_bad))

    # Implement required fraction of good entries
    if validFrac:
        log.debug("Requiring fraction: %.4f good entries for valid galaxies" % (validFrac))
        # Find the fraction of bad values for each galaxy
        fracBad = num_bad / numEners
        # Only invalidate galaxies which started as valid and fall below the threshold
        bads = valid & (fracBad > 1.0 - validFrac)
        numBad = np.count_nonzero(bads)
        frac = 1.0 * numBad / numVal
        log.debug(" - %d/%d = %.4f Galaxies are below threshold" % (numBad, numVal, frac))
        valid[bads] = False

    # Construct Splines of Distribution Functions
    # -------------------------------------------
    dfs = np.empty(numMergers, dtype=object)
    valInds = np.where(valid)[0]
    numVal = np.size(valInds)
    log.debug("Iterating over %d valid galaxies" % (numVal))
    # Iterate over valid galaxies
    for ii, val in enumerate(tqdm.tqdm(valInds, desc="Constructing splines")):
        dfs[val] = _useSpline(eps[val], dist_funcs[val])

    # Set invalid galaxies to the 'zero function'
    dfs[~valid] = constants._zeroFunc

    return valid, dfs
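# NOTE: example sketch evaluating the returned spline functions (hypothetical `sets`
# object; the test energy 1.0 is arbitrary, in whatever units `eps` uses).  Invalid
# galaxies hold the zero-function and are therefore also safe to call.
def _example_dist_funcs(sets):
    valid, dfs = loadDistFuncs(sets)
    for ii in np.where(valid)[0][:5]:
        # Each `dfs[ii]` is a scalar callable: distribution function vs. energy.
        print(ii, dfs[ii](1.0))
    return valid, dfs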
def loadBHHostsSnap(run, snap, version=None, loadsave=True, verbose=True, bar=None, convert=None):
    """Load the pre-existing BH-hosts table for one snapshot, or manage its creation.

    Arguments
    ---------
    run      <int>  : illustris simulation number {1, 3}
    snap     <int>  : illustris snapshot number {1, 135}
    version  <flt>  : optional, version to load (can only create the current version!)
    loadsave <bool> : optional, load existing table
    verbose  <bool> : optional, print verbose output
    bar      <bool> : optional, show a progress bar; defaults to ``verbose``
    convert  <flt>  : optional, version of an existing full offset table to convert from

    Returns
    -------
    hostTable <dict> : BH-hosts table, see `ParticleHosts` docs for more info.

    """
    if verbose: print(" - - ParticleHosts.loadBHHostsSnap()")
    if bar is None: bar = bool(verbose)

    # Load Existing Save
    # ==================
    if loadsave:
        saveFile = _GET_BH_HOSTS_SNAP_TABLE_FILENAME(run, snap, version)
        if verbose: print(" - - - Loading from save '{:s}'".format(saveFile))
        # Make sure path exists
        if os.path.exists(saveFile):
            hostTable = zio.npzToDict(saveFile)
            if verbose: print(" - - - - Table loaded")
        else:
            if verbose: print(" - - - - File does not exist, reconstructing BH Hosts")
            loadsave = False

    # Reconstruct Hosts Table
    # =======================
    if not loadsave:
        if verbose: print(" - - - Constructing Offset Table")
        start = datetime.now()

        if version is not None:
            raise RuntimeError("Can only create version '%s'" % _VERSION)

        saveFile = _GET_BH_HOSTS_SNAP_TABLE_FILENAME(run, snap)

        offsetFile = ''
        if convert is not None:
            offsetFile = _GET_OFFSET_TABLE_FILENAME(run, snap, version=convert)
            if verbose:
                print(" - - - Trying to convert from existing '{:s}'".format(offsetFile))

        # Convert an Existing (Full) Offset Table into BH Hosts
        # -----------------------------------------------------
        if os.path.exists(offsetFile):
            offsetTable = zio.npzToDict(offsetFile)
            bhInds = offsetTable[OFFTAB.BH_INDICES]
            bhIDs = offsetTable[OFFTAB.BH_IDS]
            bhHalos = offsetTable[OFFTAB.BH_HALOS]
            bhSubhs = offsetTable[OFFTAB.BH_SUBHALOS]
        else:
            if verbose: print(" - - - Reconstructing offset table")

            # Construct Offset Data
            haloNums, subhNums, offsets = _constructOffsetTable(run, snap, verbose=verbose)

            # Construct BH index Data
            # Catch errors for bad snapshots
            try:
                bhInds, bhIDs = _constructBHIndexTable(run, snap, verbose=verbose)
            except Exception:
                # If this is a known bad snapshot, set values to None
                if snap in GET_BAD_SNAPS(run):
                    if verbose:
                        print(" - - - BAD SNAPSHOT: RUN {:d}, Snap {:d}".format(run, snap))
                    bhInds = None
                    bhIDs = None
                    bhHalos = None
                    bhSubhs = None
                # If this is not a known problem, still raise error
                else:
                    print("this is not a known bad snapshot: run {:d}, snap {:d}".format(
                        run, snap))
                    raise
            # On success, Find BH Subhalos
            else:
                binInds = np.digitize(bhInds, offsets[:, PARTICLE.BH]).astype(DTYPE.INDEX) - 1
                bads = np.where(binInds < 0)[0]
                if bads.size > 0:
                    raise RuntimeError("Some bhInds not matched!! '%s'" % (str(bads)))

                bhHalos = haloNums[binInds]
                bhSubhs = subhNums[binInds]

        # Save To Dict
        # ------------
        hostTable = {}
        # Metadata
        hostTable[OFFTAB.RUN] = run
        hostTable[OFFTAB.SNAP] = snap
        hostTable[OFFTAB.VERSION] = _VERSION
        hostTable[OFFTAB.CREATED] = datetime.now().ctime()
        hostTable[OFFTAB.FILENAME] = saveFile
        # BH Data
        hostTable[OFFTAB.BH_INDICES] = bhInds
        hostTable[OFFTAB.BH_IDS] = bhIDs
        hostTable[OFFTAB.BH_HALOS] = bhHalos
        hostTable[OFFTAB.BH_SUBHALOS] = bhSubhs

        # Save to file
        zio.dictToNPZ(hostTable, saveFile, verbose=verbose)
        stop = datetime.now()
        if verbose: print(" - - - - Done after {:s}".format(str(stop - start)))

    return hostTable
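# NOTE: example sketch mapping BH IDs to their host subhalos in one snapshot
# (hypothetical driver using run 1, snap 135; known-bad snapshots store `None` entries).
def _example_bh_hosts_snap(run=1, snap=135):
    hosts = loadBHHostsSnap(run, snap, loadsave=True, verbose=False)
    bhIDs = hosts[OFFTAB.BH_IDS]
    bhSubhs = hosts[OFFTAB.BH_SUBHALOS]
    if bhIDs is None:
        return {}   # bad snapshot, no BH data available
    return dict(zip(bhIDs, bhSubhs))    # BH ID --> host subhalo index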
def loadBHHosts(run, loadsave=True, version=None, verbose=True, bar=None, convert=None):
    """Merge individual snapshots' blackhole-hosts files into a single file.

    Arguments
    ---------
    run      <int>  : illustris simulation number {1, 3}
    loadsave <bool> : optional, load existing save if possible
    version  <flt>  : optional, target version number (can only create the current version!)
    verbose  <bool> : optional, print verbose output
    bar      <bool> : optional, show a progress bar; defaults to ``verbose``
    convert  <flt>  : optional, passed to `loadBHHostsSnap` to convert existing offset tables

    Returns
    -------
    bhHosts <dict> : table of hosts for all snapshots

    """
    if verbose: print(" - - ParticleHosts.loadBHHosts()")
    if bar is None: bar = bool(verbose)

    # Load Existing Save
    # ==================
    if loadsave:
        saveFile = _GET_BH_HOSTS_TABLE_FILENAME(run, version=version)
        if verbose: print(" - - - Loading from save '{:s}'".format(saveFile))
        # Make sure path exists
        if os.path.exists(saveFile):
            bhHosts = zio.npzToDict(saveFile)
            if verbose: print(" - - - - Table loaded")
        else:
            if verbose: print(" - - - - File does not exist, reconstructing BH Hosts")
            loadsave = False

    # Reconstruct Hosts Table
    # =======================
    if not loadsave:
        if verbose: print(" - - - Constructing Hosts Table")
        start = datetime.now()

        if version is not None:
            raise RuntimeError("Can only create version '%s'" % _VERSION)

        saveFile = _GET_BH_HOSTS_TABLE_FILENAME(run)

        # Create progress-bar
        pbar = zio.getProgressBar(NUM_SNAPS)
        if bar: pbar.start()

        # Select the dict-keys for snapshot hosts to transfer
        hostKeys = [OFFTAB.BH_IDS, OFFTAB.BH_INDICES, OFFTAB.BH_HALOS, OFFTAB.BH_SUBHALOS]

        # Create dictionary
        # -----------------
        bhHosts = {}
        # Add metadata
        bhHosts[OFFTAB.RUN] = run
        bhHosts[OFFTAB.VERSION] = _VERSION
        bhHosts[OFFTAB.CREATED] = datetime.now().ctime()
        bhHosts[OFFTAB.FILENAME] = saveFile

        # Load All BH-Hosts Files
        # -----------------------
        for snap in range(NUM_SNAPS):
            # Load Snapshot BH-Hosts
            hdict = loadBHHostsSnap(run, snap, loadsave=True, verbose=verbose, convert=convert)
            # Extract and store target data
            snapStr = OFFTAB.snapDictKey(snap)
            bhHosts[snapStr] = {hkey: hdict[hkey] for hkey in hostKeys}
            if bar: pbar.update(snap)

        if bar: pbar.finish()

        # Save to file
        zio.dictToNPZ(bhHosts, saveFile, verbose=verbose)
        stop = datetime.now()
        if verbose: print(" - - - - Done after %s" % (str(stop - start)))

    return bhHosts
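# NOTE: example sketch pulling one snapshot's hosts out of the merged table
# (hypothetical driver; the per-snapshot key layout follows `OFFTAB.snapDictKey`,
# exactly as it is written in `loadBHHosts` above).
def _example_bh_hosts(run=1, snap=100):
    bhHosts = loadBHHosts(run, loadsave=True, verbose=False)
    snapStr = OFFTAB.snapDictKey(snap)
    return bhHosts[snapStr][OFFTAB.BH_SUBHALOS]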