def plot_dm_by_dt_fesc(snapshot):
    import random
    import numpy as np
    import matplotlib.pylab as plt
    from seren3.scripts.mpi import write_fesc_hid_dict

    # escape-fraction database; load_db below is assumed to be the mass-flux
    # database loader defined in the enclosing module (it is not imported here)
    fesc_db = write_fesc_hid_dict.load_db(snapshot.path, snapshot.ioutput)
    mass_flux_db = load_db(snapshot.path, snapshot.ioutput)

    hids = fesc_db.keys()
    fesc = np.zeros(len(hids))
    outflow_dm_by_dt = np.zeros(len(hids))

    i = 0
    for hid in hids:
        ifesc = fesc_db[hid]["fesc"]
        if (ifesc > 1.):
            # clamp unphysical escape fractions (> 1) to just below unity
            ifesc = random.uniform(0.9, 1.0)
        fesc[i] = ifesc
        F, F_plus, F_minus = mass_flux_db[hid]["F"]
        outflow_dm_by_dt[i] = F_plus
        i += 1

    plt.scatter(outflow_dm_by_dt, fesc)
    plt.ylabel(r"f$_{\mathrm{esc}}$")
    plt.xlabel(r"$dM/dt$ [M$_{\odot}$/h]")
    plt.show()
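# Hedged usage sketch: plot_dm_by_dt_fesc expects a seren3 snapshot, which the
# other examples in this listing obtain via seren3.init(path) and integer
# indexing into the simulation object; the path and output number below are
# placeholders.
import seren3

sim = seren3.init("/path/to/simulation")  # hypothetical path
snap = sim[109]                           # hypothetical output number
plot_dm_by_dt_fesc(snap)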
Example #2
def load_fesc(snapshot):
    import random
    import numpy as np
    from seren3.scripts.mpi import write_fesc_hid_dict

    db = write_fesc_hid_dict.load_db(snapshot.path, snapshot.ioutput)
    hids = db.keys()

    mvir = np.zeros(len(hids))
    fesc = np.zeros(len(hids))
    nphotons = np.zeros(len(hids))

    count = 0.0
    for i in range(len(hids)):
        hid = hids[i]
        res = db[hid]

        ifesc = res["fesc"]
        if ifesc > 1. and ifesc < 10.:
            # if ifesc > 1.:
            fesc[i] = random.uniform(0.9, 1.0)
            count += 1.0
        elif ifesc > 0. and ifesc <= 1.:
            fesc[i] = ifesc
        else:
            #         if (res["fesc"] > 10.):
            #             print "%e   %e" % (res["tot_mass"], res["fesc"])
            continue
        mvir[i] = res["hprops"]["mvir"]

        Nion_d_now = db[hid]["Nion_d_now"].in_units("s**-1 Msol**-1")
        star_mass = db[hid]["star_mass"].in_units("Msol")
        nphotons[i] = (Nion_d_now * star_mass).sum()

    print count / float(len(mvir))
    print len(mvir)

    ix = np.where(fesc > 0)
    fesc = fesc[ix]
    mvir = mvir[ix]
    nphotons = nphotons[ix]

    log_mvir = np.log10(mvir)
    log_fesc = np.log10(fesc)

    # ix = np.where(np.logical_and(log_mvir >= 7.5, np.log10(fesc*100.) >= -1))
    # ix = np.where(log_mvir >= 7.5)
    # log_mvir = log_mvir[ix]
    # fesc = fesc[ix]
    # nphotons = nphotons[ix]

    ix = np.where(~np.isnan(fesc))
    log_mvir = log_mvir[ix]
    fesc = fesc[ix]
    nphotons = nphotons[ix]

    print 'Loaded data for %d halos' % len(log_mvir)
    return log_mvir, fesc, nphotons
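# Hedged usage sketch: load_fesc returns log10 halo masses, (clamped) escape
# fractions and ionising photon counts for one snapshot; the snapshot is built
# the same way as in the other examples and the path/output are placeholders.
import numpy as np
import matplotlib.pylab as plt
import seren3

snap = seren3.init("/path/to/simulation")[109]  # hypothetical path/output
log_mvir, fesc, nphotons = load_fesc(snap)
plt.scatter(log_mvir, np.log10(fesc * 100.))
plt.xlabel(r"log$_{10}$ M$_{\mathrm{vir}}$")
plt.ylabel(r"log$_{10}$ f$_{\mathrm{esc}}$ [%]")
plt.show()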
Example #3
def main(path, pickle_path):
    import random
    import numpy as np
    import seren3
    from seren3.analysis.parallel import mpi
    from seren3.exceptions import NoParticlesException
    from seren3.analysis.escape_fraction import time_integrated_fesc
    from seren3.scripts.mpi import write_fesc_hid_dict

    mpi.msg("Loading simulation...")
    sim = seren3.init(path)

    iout_start = max(sim.numbered_outputs[0], 60)
    back_to_aexp = sim[60].info["aexp"]
    # iouts = range(iout_start, max(sim.numbered_outputs)+1)
    print "IOUT RANGE HARD CODED"
    iouts = range(iout_start, 109)
    # iouts = [109]

    for iout in iouts[::-1]:
        snap = sim[iout]
        mpi.msg("Working on snapshot %05i" % snap.ioutput)
        snap.set_nproc(1)
        halos = snap.halos(finder="ctrees")

        db = write_fesc_hid_dict.load_db(path, iout)
        
        halo_ids = None
        if mpi.host:
            halo_ids = db.keys()
            random.shuffle(halo_ids)

        dest = {}
        for i, sto in mpi.piter(halo_ids, storage=dest, print_stats=True):
            h = halos.with_id(i)
            res = time_integrated_fesc(h, back_to_aexp, db=db, return_data=True)
            if (res is not None):
                mpi.msg("%05i \t %i \t %i" % (snap.ioutput, h.hid, i))
                tint_fesc_hist, I1, I2, lbtime, hids, iouts = res

                fesc = I1/I2
                sto.idx = h.hid
                sto.result = {'tint_fesc_hist' : tint_fesc_hist, 'fesc' : fesc, 'I1' : I1, \
                        'I2' : I2, 'lbtime' : lbtime, 'Mvir' : h["Mvir"], 'hids' : hids, 'iouts' : iouts}
        if mpi.host:
            import pickle, os
            # pickle_path = "%s/pickle/%s/" % (snap.path, halos.finder)
            if not os.path.isdir(pickle_path):
                os.mkdir(pickle_path)
            pickle.dump( mpi.unpack(dest), open("%s/time_int_fesc_all_halos_%05i.p" % (pickle_path, snap.ioutput), "wb") )

        mpi.msg("Waiting...")
        mpi.comm.Barrier()
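# Hedged sketch: each pickle written by main() can be read back the same way
# load_tint_fesc does in the next example; every entry carries .idx (the halo
# id) and .result (the dict stored via sto.result). The directory and output
# number below are placeholders.
import pickle

fname = "%s/time_int_fesc_all_halos_%05i.p" % ("/path/to/pickle/dir", 109)
data = pickle.load(open(fname, "rb"))
for entry in data:
    hid = int(entry.idx)
    tint_fesc_hist = entry.result["tint_fesc_hist"]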
Example #4
def load_tint_fesc(snapshot):
    import pickle
    import random
    import numpy as np
    from seren3.scripts.mpi import write_fesc_hid_dict

    #     fname = "%s/pickle/ConsistentTrees/fesc_database_%05i.p" % (snapshot.path, snapshot.ioutput)
    fname = "%s/pickle/ConsistentTrees/time_int_fesc_all_halos_%05i.p" % (
        snapshot.path, snapshot.ioutput)
    data = pickle.load(open(fname, "rb"))

    db = write_fesc_hid_dict.load_db(snapshot.path, snapshot.ioutput)

    mvir = []
    fesc = []
    tint_fesc = []
    # hids = []
    nphotons = []

    for i in range(len(data)):
        res = data[i].result

        ifesc = res["fesc"][0]
        if ifesc > 1. and ifesc <= 10.:
            fesc.append(random.uniform(0.9, 1.0))
        elif ifesc > 0. and ifesc <= 1.:
            fesc.append(ifesc)
        else:
            #         if (res["fesc"] > 10.):
            #             print "%e   %e" % (res["tot_mass"], res["fesc"])
            continue
        mvir.append(res["Mvir"])
        tint_fesc.append(res["tint_fesc_hist"][0])
        hid = int(data[i].idx)
        # hids.append(hid)

        Nion_d_now = db[hid]["Nion_d_now"].in_units("s**-1 Msol**-1")
        star_mass = db[hid]["star_mass"].in_units("Msol")
        nphotons.append((Nion_d_now * star_mass).sum())

    mvir = np.array(mvir)
    fesc = np.array(fesc)
    tint_fesc = np.array(tint_fesc)
    # hids = np.array(hids)
    nphotons = np.array(nphotons)

    #     print count/float(len(mvir))

    ix = np.where(fesc > 0)
    fesc = fesc[ix]
    tint_fesc = tint_fesc[ix]
    mvir = mvir[ix]
    # hids = hids[ix]
    nphotons = nphotons[ix]

    log_mvir = np.log10(mvir)
    log_fesc = np.log10(fesc)

    # ix = np.where(np.logical_and(log_mvir >= 7., log_fesc > -3))
    ix = np.where(log_mvir >= 7.5)
    log_mvir = log_mvir[ix]
    fesc = fesc[ix]
    tint_fesc = tint_fesc[ix]
    # hids = hids[ix]
    nphotons = nphotons[ix]

    ix = np.where(~np.isnan(tint_fesc))
    log_mvir = log_mvir[ix]
    fesc = fesc[ix]
    tint_fesc = tint_fesc[ix]
    # hids = hids[ix]
    nphotons = nphotons[ix]

    print 'Loaded data for %d halos' % len(log_mvir)
    return log_mvir, fesc, tint_fesc, nphotons
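# Hedged usage sketch: compare the instantaneous and time-integrated escape
# fractions returned by load_tint_fesc; the snapshot is constructed as in the
# other examples and the path/output number are placeholders.
import matplotlib.pylab as plt
import seren3

snap = seren3.init("/path/to/simulation")[109]  # hypothetical path/output
log_mvir, fesc, tint_fesc, nphotons = load_tint_fesc(snap)
plt.scatter(fesc, tint_fesc)
plt.xlabel(r"f$_{\mathrm{esc}}$")
plt.ylabel(r"time-integrated f$_{\mathrm{esc}}$")
plt.show()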
Example #5
def time_integrated_fesc(halo, back_to_aexp, return_data=True, **kwargs):
    '''
    Computes the time-integrated escape fraction across
    the history of the halo, a la Kimm & Cen 2014
    '''
    import random
    import numpy as np
    from seren3.scripts.mpi import write_fesc_hid_dict

    db = kwargs.pop("db", write_fesc_hid_dict.load_db(halo.base.path, halo.base.ioutput))
    if (int(halo["id"]) in db.keys()):
        catalogue = halo.base.halos(finder="ctrees")

        # dicts to store results
        fesc_dict = {}
        Nphoton_dict = {}
        age_dict = {}
        hid_dict = {}

        def _compute(h, db):
            hid = int(h["id"])
            result = db[hid]

            fesc_h = result["fesc"]

            if (fesc_h > 1.):
                fesc_h = random.uniform(0.9, 1.0)
            Nphotons = (result["Nion_d_now"] * result["star_mass"].in_units("Msol")).sum()

            fesc_dict[h.base.ioutput] = fesc_h
            Nphoton_dict[h.base.ioutput] = Nphotons # at t=0, not dt=rvir/c !!!
            age_dict[h.base.ioutput] = h.base.age
            hid_dict[h.base.ioutput] = hid

        # Compute fesc for this halo (snapshot)
        _compute(halo, db)
        # Iterate through the most-massive progenitor line
        for prog in catalogue.iterate_progenitors(halo, back_to_aexp=back_to_aexp):
            prog_db = write_fesc_hid_dict.load_db(prog.base.path, prog.base.ioutput)
            # print prog

            if (int(prog["id"]) in prog_db.keys()):
                _compute(prog, prog_db)
            else:
                break

        # I1/I2 = numerator/denominator to be integrated
        I1 = np.zeros(len(fesc_dict))
        I2 = np.zeros(len(fesc_dict))
        age_array = np.zeros(len(age_dict))
        hid_array = np.zeros(len(fesc_dict), dtype=np.int64)
        iout_arr = np.zeros(len(hid_array))

        # Populate the arrays
        for key, i in zip( sorted(fesc_dict.keys(), reverse=True), range(len(fesc_dict)) ):
            I1[i] = fesc_dict[key] * Nphoton_dict[key]
            I2[i] = Nphoton_dict[key]
            age_array[i] = age_dict[key]
            hid_array[i] = hid_dict[key]
            iout_arr[i] = key

        # Calculate lookback-time
        lbtime = halo.base.age - age_array

        # Integrate back in time for each snapshot
        tint_fesc_hist = np.zeros(len(lbtime))
        for i in xrange(len(tint_fesc_hist)):
            tint_fesc_hist[i] = integrate_fesc( I1[i:], I2[i:], lbtime[i:] )

        # fesc at each time step can be computed by taking I1/I2
        if return_data:    
            return tint_fesc_hist, I1, I2, lbtime, hid_array, iout_arr
        return tint_fesc_hist
    else:
        return None
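# Hedged sketch of the quantity computed above: the time-integrated escape
# fraction of Kimm & Cen (2014) is <fesc>(t) = integral(fesc * Ndot_ion dt) /
# integral(Ndot_ion dt), i.e. the ratio of I1 and I2 integrated over lookback
# time. The actual integrate_fesc is defined elsewhere in seren3 and is not
# shown here; a trapezoidal version, under that assumption, could look like:
import numpy as np

def integrate_fesc_sketch(I1, I2, lbtime):
    # numerator: escape-fraction-weighted photon production rate
    # denominator: photon production rate
    return np.trapz(I1, lbtime) / np.trapz(I2, lbtime)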
Example #6
def halo_photon_relative_contribution(simulation,
                                      ioutputs,
                                      the_mass_bins=[8., 8.5, 9., 9.5, 10.]):
    import random
    import numpy as np
    from scipy import interpolate
    from seren3.scripts.mpi import write_fesc_hid_dict

    if not isinstance(the_mass_bins, np.ndarray):
        the_mass_bins = np.array(the_mass_bins)

    pickle_dir = "%s/pickle/ConsistentTrees/" % simulation.path

    snapshot = simulation[ioutputs[-1]]
    age_now = snapshot.age

    binned_data = [
        MassBin(i, the_mass_bins[i]) for i in range(len(the_mass_bins))
    ]
    binned_data.append(MassBin(len(the_mass_bins) + 1,
                               "All"))  # Bin to contain all halos

    def _store(mass_bin, tint_fesc_arr, Nion_d_sum, lbtime, z):
        mass_bin.tint_fesc_mean.append(tint_fesc_arr.mean())
        mass_bin.Nion_d_sum.append(Nion_d_sum.sum())
        # mass_bin.std.append(tint_fesc_arr.std())
        mass_bin.lbtime.append(lbtime)
        mass_bin.z.append(z)

    for ioutput in ioutputs:
        print ioutput
        snapshot = simulation[ioutput]
        data = write_fesc_hid_dict.load_db(simulation.path, ioutput)

        # Compute quantities
        lbtime = age_now - snapshot.age
        Nion_esc = np.zeros(len(data))
        Mvir = np.zeros(len(data))
        for i, hid in enumerate(data.keys()):
            res = data[hid]
            fesc_h = res["fesc"]
            if (fesc_h > 1.):
                fesc_h = random.uniform(0.9, 1.0)

            Nion_esc[i] = fesc_h * (res["Nion_d_now"] *
                                    res["star_mass"].in_units("Msol")).sum()
            Mvir[i] = res["hprops"]["mvir"]

        # Bin data
        mass_bins = np.digitize(np.log10(Mvir), the_mass_bins, right=True)
        lbtime_arr = np.ones(len(Nion_esc)) * lbtime
        # Do the binning
        for mbin in binned_data:
            if mbin.mass_bin == "All":
                mbin.extend(lbtime_arr, Nion_esc)
            else:
                idx = np.where(mass_bins == mbin.idx)
                mbin.extend(lbtime_arr[idx], Nion_esc[idx])

    # return binned_data

    nbins = 10
    # Compute the relative contributions
    bc_all, sy_all = binned_data[-1].sum(nbins=nbins)
    fn_all = interpolate.interp1d(bc_all,
                                  np.log10(sy_all),
                                  fill_value="extrapolate")
    for i in range(len(binned_data) - 1):
        mbin = binned_data[i]
        bc, sy = mbin.sum(nbins=nbins)
        mbin.relative_contribution = sy / 10**fn_all(bc)
        nbins -= 2
    binned_data[-1].relative_contribution = np.ones(len(sy))

    return binned_data
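# Hedged usage sketch: each returned MassBin (a helper class defined elsewhere
# in this script and not shown here) ends up with a relative_contribution array;
# one way to use the output is to plot that contribution per mass bin. The
# simulation path and output range below are placeholders.
import matplotlib.pylab as plt
import seren3

sim = seren3.init("/path/to/simulation")  # hypothetical path
binned_data = halo_photon_relative_contribution(sim, range(60, 109))
for mbin in binned_data[:-1]:
    plt.plot(mbin.relative_contribution, label=str(mbin.mass_bin))
plt.legend()
plt.show()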