Example #1
def save_grid(fname, grid, grid_names, grid_temps, grid_valid):
    fname = os.path.abspath(fname)
    with h5py.File(fname, 'w') as out:
        group = out.create_group('grid')
        for nn, vv in zip(grid_names, grid):
            group.create_dataset(nn, data=vv)

        group = out.create_group('parameters')
        for nn, vv in META.items():
            group.create_dataset(nn, data=vv)

        out.create_dataset('temps', data=grid_temps)
        out.create_dataset('valid', data=grid_valid)

    logging.info("Saved to '{}' size '{}'".format(fname,
                                                  zio.get_file_size(fname)))
    return
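A minimal usage sketch for `save_grid`, assuming the module-level `META` dict and the `zio` helper from the surrounding code are available (all values below are hypothetical placeholders):

import numpy as np

# Hypothetical grid: one 1D array of edge values per named dimension
grid_names = ['mass', 'sepa']
grid = [np.logspace(6, 10, 4), np.logspace(-2, 2, 5)]
grid_temps = np.zeros((4, 5))               # placeholder grid values
grid_valid = np.ones((4, 5), dtype=bool)    # placeholder validity flags
save_grid('grid.hdf5', grid, grid_names, grid_temps, grid_valid)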
Example #2
def save_fig(self,
             fig,
             fname,
             path=None,
             subdir=None,
             snap_num=None,
             modify=False,
             verbose=False,
             **kwargs):
    fname = self.fname(fname,
                       path=path,
                       subdir=subdir,
                       snap_num=snap_num,
                       modify=modify)
    kwargs.setdefault('transparent', True)
    kwargs.setdefault('dpi', 100)
    zio.check_path(fname)
    fig.savefig(fname, **kwargs)
    if verbose:
        print("saved to '{}' size: {}".format(
            fname, zio.get_file_size(fname)))
    return fname
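A hedged usage sketch; `plotter` stands in for an instance of the (unshown) enclosing class, and the file and subdirectory names are hypothetical:

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0.0, 1.0], [0.0, 1.0])
# 'test.pdf' and 'diagnostics' are placeholder names
saved = plotter.save_fig(fig, 'test.pdf', subdir='diagnostics', verbose=True)
plt.close(fig)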
Example #3
def _reformat_to_hdf5(core, snap, temp_fname, out_fname):
    """
    """

    log = core.log
    cosmo = core.cosmo
    log.debug("details._reformat_to_hdf5()")
    log.info("Snap {}, {} ==> {}".format(snap, temp_fname, out_fname))
    CONV_ILL_TO_CGS = core.cosmo.CONV_ILL_TO_CGS

    loadsave = (not core.sets.RECREATE)

    # Make sure the temporary file exists, otherwise raise an error
    if not os.path.exists(temp_fname):
        log.raise_error("Temp file '{}' does not exist!".format(temp_fname))

    # Try to load from existing save
    if loadsave:
        if os.path.exists(out_fname):
            log.info("\tOutput file '{}' already exists.".format(out_fname))
            return

    # Load dets from ASCII File
    vals = _load_bhdetails_ascii(temp_fname)
    ids, scales, masses, mdots, rhos, cs = vals
    # Sort by ID number, then by scale-factor
    sort = np.lexsort((scales, ids))
    vals = [vv[sort] for vv in vals]
    ids, scales, masses, mdots, rhos, cs = vals

    # Find unique ID numbers, their first-occurrence indices, and the
    # number of occurrences
    u_ids, u_inds, u_counts = np.unique(ids,
                                        return_index=True,
                                        return_counts=True)
    num_unique = u_ids.size
    log.info("\tunique IDs: {}".format(zmath.frac_str(num_unique, ids.size)))

    # Calculate mass-differences
    dmdts = np.zeros_like(mdots)
    for ii, nn in zip(u_inds, u_counts):
        j0 = slice(ii, ii + nn - 1)
        j1 = slice(ii + 1, ii + nn)
        # t0 = cosmo.scale_to_age(scales[j0])
        # t1 = cosmo.scale_to_age(scales[j1])
        z0 = cosmo._a_to_z(scales[j0])
        z1 = cosmo._a_to_z(scales[j1])
        t0 = cosmo.age(z0).cgs.value
        t1 = cosmo.age(z1).cgs.value
        m0 = masses[j0]
        m1 = masses[j1]
        dm = m1 - m0
        dt = t1 - t0

        # dmdts[j1] = (m1 - m0) / dt

        # Preserve the sign of the rate: flag steps where the mass or time
        # difference is negative
        ss = np.ones_like(dm)
        neg = (dm < 0.0) | (dt < 0.0)
        ss[neg] *= -1

        # Skip zero time-steps to avoid division by zero; `dmdts[j1]` is a
        # slice (a view), so this assignment writes back into `dmdts`
        inds = (dt != 0.0)
        dmdts[j1][inds] = ss[inds] * np.fabs(dm[inds] / dt[inds])

    # Convert dmdts to same units as mdots
    dmdts = dmdts * CONV_ILL_TO_CGS.MASS / CONV_ILL_TO_CGS.MDOT

    with h5py.File(out_fname, 'w') as out:
        out.attrs[DETAILS.RUN] = core.sets.RUN_NUM
        out.attrs[DETAILS.SNAP] = snap
        out.attrs[DETAILS.NUM] = len(ids)
        out.attrs[DETAILS.CREATED] = str(datetime.now().ctime())
        out.attrs[DETAILS.VERSION] = VERSION

        out.create_dataset(DETAILS.IDS, data=ids)
        out.create_dataset(DETAILS.SCALES, data=scales)
        out.create_dataset(DETAILS.MASSES, data=masses)
        out.create_dataset(DETAILS.MDOTS, data=mdots)
        out.create_dataset(DETAILS.DMDTS, data=dmdts)
        out.create_dataset(DETAILS.RHOS, data=rhos)
        out.create_dataset(DETAILS.CS, data=cs)

        out.create_dataset(DETAILS.UNIQUE_IDS, data=u_ids)
        out.create_dataset(DETAILS.UNIQUE_INDICES, data=u_inds)
        out.create_dataset(DETAILS.UNIQUE_COUNTS, data=u_counts)

    size_str = zio.get_file_size(out_fname)
    log.info("\tSaved snap {} to '{}', size {}".format(snap, out_fname,
                                                       size_str))

    return
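The per-ID rate estimate above follows a self-contained numpy pattern: lexsort by ID then scale-factor, `np.unique` with first-occurrence indices and counts, and adjacent differences within each contiguous run. A toy sketch of that pattern, with plain time values standing in for the cosmology conversion:

import numpy as np

ids = np.array([2, 1, 1, 2, 1])
times = np.array([3.0, 1.0, 2.0, 1.0, 3.0])
masses = np.array([5.0, 1.0, 2.0, 4.0, 4.0])

# The last key passed to `lexsort` is the primary key: sort by ID, then time
sort = np.lexsort((times, ids))
ids, times, masses = ids[sort], times[sort], masses[sort]

u_ids, u_inds, u_counts = np.unique(ids, return_index=True, return_counts=True)
dmdts = np.zeros_like(masses)
for ii, nn in zip(u_inds, u_counts):
    j0 = slice(ii, ii + nn - 1)   # all but the last entry of this ID's run
    j1 = slice(ii + 1, ii + nn)   # all but the first entry of this ID's run
    dmdts[j1] = (masses[j1] - masses[j0]) / (times[j1] - times[j0])

# dmdts == [0.0, 1.0, 2.0, 0.0, 0.5]; the first entry of each run stays zero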
Example #4
def main():
    core = Core(
        sets=dict(LOG_FILENAME="log_illbh-snapshots.log", RECREATE=True))
    log = core.log

    log.info("details.main()")
    print(log.filename)

    beg = datetime.now()

    fname = core.paths.fname_bh_particles
    exists = os.path.exists(fname)

    recreate = core.sets.RECREATE
    log.debug("File '{}' exists: {}".format(fname, exists))

    if not recreate and exists:
        log.info("Particle file exists: '{}'".format(fname))
        return

    log.warning("Loading BH particle data from snapshots")
    fname_temp = zio.modify_filename(fname, prepend='_')

    log.debug("Writing to temporary file '{}'".format(fname_temp))
    # log.error("WARNING: running in TEMPORARY append mode!")
    # with h5py.File(fname_temp, 'a') as out:
    with h5py.File(fname_temp, 'r') as out:

        all_ids = set()

        for snap in core.tqdm(range(NUM_SNAPS), desc='Loading snapshots'):

            log.debug("Loading snap {}".format(snap))
            snap_str = '{:03d}'.format(snap)
            group = out.create_group(snap_str)

            try:
                bhs = illpy.snapshot.loadSubset(core.paths.INPUT, snap,
                                                PARTICLE.BH)
            except Exception as err:
                log.error("FAILED on snap {}!!! {}".format(snap, err))
                continue

            num_bhs = bhs['count']
            log.info("Snap {} Loaded {} BHs".format(snap, num_bhs))
            group.attrs['count'] = num_bhs
            if num_bhs == 0:
                continue

            ids = bhs['ParticleIDs']
            all_ids = all_ids.union(ids)
            sort = np.argsort(ids)

            keys = list(bhs.keys())
            keys.remove('count')
            for kk in keys:
                group.create_dataset(kk, data=bhs[kk][:][sort])

        all_ids = np.array(sorted(all_ids))
        # Initialize `first` past the last snapshot so any real occurrence is smaller
        first = NUM_SNAPS * np.ones_like(all_ids, dtype=np.uint32)
        last = np.zeros_like(all_ids, dtype=np.uint32)

        # Find the first and last snapshot that each BH is found in
        for snap in core.tqdm(range(NUM_SNAPS), desc='Finding first/last'):
            snap_str = '{:03d}'.format(snap)
            try:
                ids = out[snap_str]['ParticleIDs'][:]
            except KeyError as err:
                lvl = log.INFO if (snap in [53, 55]) else log.ERROR
                log.log(
                    lvl,
                    "Failed to access `ParticleIDs` from snap {}".format(snap))
                log.log(lvl, str(err))
                continue

            slots = np.searchsorted(all_ids, ids)
            first[slots] = np.minimum(first[slots], snap)
            last[slots] = np.maximum(last[slots], snap)

        out.create_dataset('unique_ids', data=all_ids)
        out.create_dataset('unique_first_snap', data=first)
        out.create_dataset('unique_last_snap', data=last)

    log.debug("Moving temporary to final file '{}' ==> '{}'".format(
        fname_temp, fname))
    shutil.move(fname_temp, fname)

    size_str = zio.get_file_size(fname)
    end = datetime.now()
    log.info("Saved to '{}', size {}, after {}".format(fname, size_str,
                                                       end - beg))

    return
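The first/last bookkeeping in `main` relies on `np.searchsorted` against the sorted `all_ids` array; a standalone sketch of the same pattern, with toy IDs and the number of snapshots assumed to be 3:

import numpy as np

all_ids = np.array([10, 20, 30])           # sorted unique IDs
NUM_SNAPS = 3
first = NUM_SNAPS * np.ones_like(all_ids)  # past the last snap, so any hit is smaller
last = np.zeros_like(all_ids)

for snap, ids in enumerate([[10, 30], [20, 30], [30]]):
    slots = np.searchsorted(all_ids, ids)  # positions of `ids` within `all_ids`
    first[slots] = np.minimum(first[slots], snap)
    last[slots] = np.maximum(last[slots], snap)

# first == [0, 1, 0], last == [0, 1, 2]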
Example #5
def _load_bh_hosts_snap_table(run, snap, log, version=None, load_saved=True):
    """Load pre-existing, or manage the creation of the particle offset table.

    Arguments
    ---------
    run        <int>  : illustris simulation number {1, 3}
    snap       <int>  : illustris snapshot number {1, 135}
    log        <Logger> : logging object
    version    <flt>  : optional, target version number
    load_saved <bool> : optional, load existing table if possible

    Returns
    -------
    host_table <h5py.File> : particle offset table, see `ParticleHosts` docs for more info.

    """
    log.debug("particle_hosts._load_bh_hosts_snap_table()")
    beg_all = datetime.now()

    fname = _get_filename_bh_hosts_snap_table(run, snap)
    _path = zio.check_path(fname)
    if not os.path.isdir(_path):
        log.raise_error("Error with path for '{}' (path: '{}')".format(
            fname, _path))

    # Load Existing Save
    # ------------------
    if load_saved:
        # fname = FILENAME_BH_HOSTS_SNAP_TABLE(run, snap, version)
        log.info("Loading from save '{}'".format(fname))
        # Make sure path exists
        if os.path.exists(fname):
            host_table = h5py.File(fname, 'r')
        else:
            log.warning(
                "File '{}' does not exist.  Reconstructing.".format(fname))
            load_saved = False

    # Reconstruct Hosts Table
    # -----------------------
    if not load_saved:
        log.info("Constructing Offset Table for Snap {}".format(snap))
        COSMO = Illustris_Cosmology_TOS()

        if version is not None:
            log.raise_error("Can only create version '{}'".format(_VERSION))

        # Construct Offset Data
        beg = datetime.now()
        halo_nums, subh_nums, offsets = _construct_offset_table(run, snap, log)
        log.after("Loaded {} entries".format(len(halo_nums)), beg, beg_all)

        # Construct BH index Data
        #     Catch errors for bad snapshots
        try:
            bh_inds, bh_ids = _construct_bh_index_table(run, snap, log)
        except Exception:
            # If this is a known bad snapshot, set values to None
            if snap in COSMO.GET_BAD_SNAPS(run):
                log.info("bad snapshot: run {}, snap {}".format(run, snap))
                bh_inds = None
                bh_ids = None
                bh_halos = None
                bh_subhs = None
            # If this is not a known problem, still raise error
            else:
                log.error(
                    "this is not a known bad snapshot: run {}, snap {}".format(
                        run, snap))
                raise

        # On success, Find BH Subhalos
        else:
            bin_inds = np.digitize(bh_inds, offsets[:, PARTICLE.BH]).astype(
                DTYPE.INDEX) - 1
            if np.any(bin_inds < 0):
                log.raise_error("Some bh_inds not matched!! '{}'".format(
                    str(bin_inds)))

            bh_halos = halo_nums[bin_inds]
            bh_subhs = subh_nums[bin_inds]

        # Save To Dict
        # ------------
        log.info("Writing snapshot bh-host table to file '{}'".format(fname))
        beg = datetime.now()
        with h5py.File(fname, 'w') as host_table:
            # Metadata
            host_table.attrs[OFFTAB.RUN] = run
            host_table.attrs[OFFTAB.SNAP] = snap
            host_table.attrs[OFFTAB.VERSION] = _VERSION
            host_table.attrs[OFFTAB.CREATED] = datetime.now().ctime()
            host_table.attrs[OFFTAB.FILENAME] = fname

            # BH Data
            host_table.create_dataset(OFFTAB.BH_INDICES, data=bh_inds)
            host_table.create_dataset(OFFTAB.BH_IDS, data=bh_ids)
            host_table.create_dataset(OFFTAB.BH_HALOS, data=bh_halos)
            host_table.create_dataset(OFFTAB.BH_SUBHALOS, data=bh_subhs)

        log.after("Saved to '{}', size {}".format(fname,
                                                  zio.get_file_size(fname)),
                  beg,
                  beg_all,
                  lvl=log.WARNING)
        host_table = h5py.File(fname, 'r')

    return host_table
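The function returns an open, read-only `h5py.File` handle, so callers should close it when finished; a hedged sketch where the run/snap numbers and the `log` object are placeholders:

host_table = _load_bh_hosts_snap_table(1, 135, log)
bh_ids = host_table[OFFTAB.BH_IDS][:]   # read a dataset into memory
host_table.close()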
Example #6
def _load_bh_hosts_table(run, log=None, load_saved=True, version=None):
    """Merge individual snapshot's blackhole hosts files into a single file.

    Arguments
    ---------
    run        <int>  : illustris simulation number {1, 3}
    log        <Logger> : optional, logging object
    load_saved <bool> : optional, load existing save if possible
    version    <flt>  : optional, target version number

    Returns
    -------
    hosts_table <h5py.File> : table of hosts for all snapshots

    """
    log = check_log(log, run=run)
    log.debug("particle_hosts._load_bh_hosts_table()")
    beg_all = datetime.now()

    fname_bh_hosts = _get_filename_bh_hosts_table(run)
    _path = zio.check_path(fname_bh_hosts)
    if not os.path.isdir(_path):
        log.raise_error("Error with path for '{}' (path: '{}')".format(
            fname_bh_hosts, _path))

    # Load Existing Save
    # ------------------
    if load_saved:
        log.info("Loading from save '{}'".format(fname_bh_hosts))
        # Make sure path exists
        if os.path.exists(fname_bh_hosts):
            hosts_table = h5py.File(fname_bh_hosts, 'r')
        else:
            log.warning("File '{}' does not Exist.  Reconstructing.".format(
                fname_bh_hosts))
            load_saved = False

    # Reconstruct Hosts Table
    # -----------------------
    if not load_saved:
        log.info("Constructing Hosts Table")
        COSMO = Illustris_Cosmology_TOS()

        if version is not None:
            log.raise_error("Can only create version '{}'".format(_VERSION))

        # Select the dict-keys for snapshot hosts to transfer
        host_keys = [
            OFFTAB.BH_IDS, OFFTAB.BH_INDICES, OFFTAB.BH_HALOS,
            OFFTAB.BH_SUBHALOS
        ]

        # Save To HDF5
        # ------------
        log.info("Writing bh-host table to file '{}'".format(fname_bh_hosts))
        beg = datetime.now()
        with h5py.File(fname_bh_hosts, 'w') as host_table:
            # Metadata
            host_table.attrs[OFFTAB.RUN] = run
            host_table.attrs[OFFTAB.VERSION] = _VERSION
            host_table.attrs[OFFTAB.CREATED] = datetime.now().ctime()
            host_table.attrs[OFFTAB.FILENAME] = fname_bh_hosts

            for snap in tqdm.trange(COSMO.NUM_SNAPS, desc="Loading snapshots"):
                # Load Snapshot BH-Hosts
                htab_snap = _load_bh_hosts_snap_table(run,
                                                      snap,
                                                      log,
                                                      load_saved=True)
                # Extract and store target data
                snap_str = "{:03d}".format(snap)

                # Create a group for this snapshot
                snap_group = host_table.create_group(snap_str)
                # Transfer all parameters over
                for hkey in host_keys:
                    snap_group.create_dataset(hkey, data=htab_snap[hkey][:])

        log.after("Saved to '{}', size {}".format(
            fname_bh_hosts, zio.get_file_size(fname_bh_hosts)),
                  beg,
                  beg_all,
                  lvl=log.WARNING)
        hosts_table = h5py.File(fname_bh_hosts, 'r')

    return hosts_table
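A hedged usage sketch; snapshot groups are keyed by the zero-padded snapshot number, so data for snap 99 lives under '099' (the run number and `log` object are placeholders):

hosts_table = _load_bh_hosts_table(1, log=log)
snap_ids = hosts_table['099'][OFFTAB.BH_IDS][:]   # BH IDs for snap 99
hosts_table.close()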