Example #1
def test_total_baryon_mass():
    # gather most recent data set
    sim = sim_dir_load(_pf_name, path=_dir_name, find_outputs=True)

    if (sim.parameters['CosmologySimulationOmegaBaryonNow'] == 0.0):
        return

    sim.get_time_series()
    ds = sim[-1]
    data = ds.all_data()

    # sum masses
    Mstar = np.sum(
        data['particle_mass'][data['particle_type'] == 2].to('Msun'))
    Mgas = np.sum(data['cell_mass'].to('Msun'))

    output_data = {'masses': Mstar + Mgas}

    # save
    filename = "baryon_mass_results.h5"
    save_filename = os.path.join(_dir_name, filename)
    yt.save_as_dataset(ds, save_filename, output_data)

    compare_filename = os.path.join(test_data_dir, filename)
    if generate_answers:
        os.rename(save_filename, compare_filename)
        return

    ds_comp = yt.load(compare_filename)
    assert_rel_equal(output_data['masses'], ds_comp.data['masses'], tolerance)


def test_dark_matter_mass():
    # gather most recent data set
    sim = sim_dir_load(_pf_name, path=_dir_name, find_outputs=True)
    sim.get_time_series()
    ds = sim[-1]
    data = ds.all_data()

    # sum masses
    MDM = np.sum(
        data[('all',
              'particle_mass')][data[('all',
                                      'particle_type')] == 1].to('Msun'))

    output_data = {('data', 'mass'): MDM}

    # save
    filename = "DM_mass_results.h5"
    save_filename = os.path.join(_dir_name, filename)
    yt.save_as_dataset(ds, save_filename, output_data)

    compare_filename = os.path.join(test_data_dir, filename)
    if generate_answers:
        os.rename(save_filename, compare_filename)
        return

    ds_comp = yt.load(compare_filename)
    assert_rel_equal(output_data[('data', 'mass')],
                     ds_comp.data[('data', 'mass')], tolerance)
Example #3
    def test_unequal_bin_field_profile(self):
        density = np.random.random(128)
        temperature = np.random.random(127)
        mass = np.random.random((128, 128))

        my_data = {
            ("gas", "density"): density,
            ("gas", "temperature"): temperature,
            ("gas", "mass"): mass,
        }
        fake_ds_med = {"current_time": yt.YTQuantity(10, "Myr")}
        field_types = {field: "gas" for field in my_data.keys()}
        yt.save_as_dataset(fake_ds_med,
                           "mydata.h5",
                           my_data,
                           field_types=field_types)

        ds = yt.load("mydata.h5")

        with assert_raises(YTProfileDataShape):
            yt.PhasePlot(
                ds.data,
                ("gas", "temperature"),
                ("gas", "density"),
                ("gas", "mass"),
            )
def test_phase():
    es = sim_dir_load(_pf_name, path=_dir_name)
    es.get_time_series(redshifts=[0])
    ds = es[-1]
    ad = ds.all_data()
    profile = ad.profile([("gas", "density")], [("gas", "temperature"),
                                                ("gas", "cell_mass")])
    profile1 = ad.profile([("gas", "density")], [("gas", "temperature"),
                                                 ("gas", "cooling_time")],
                          weight_field=('gas', 'cell_mass'))
    density = profile.x
    temperature = profile[('gas', 'temperature')]
    cooling_time = profile1[('gas', 'cooling_time')]
    cell_mass = profile[('gas', 'cell_mass')]

    filename = 'phase_data.h5'
    save_filename = os.path.join(_dir_name, filename)
    data = {
        ('data', 'density'): density,
        ('data', 'temperature'): temperature,
        ('data', 'cooling_time'): cooling_time,
        ('data', 'cell_mass'): cell_mass
    }
    yt.save_as_dataset(ds, save_filename, data)

    pp = yt.PhasePlot(ad, ('gas', 'density'), ('gas', 'temperature'),
                      ('gas', 'cell_mass'))
    pp.set_unit(('gas', 'cell_mass'), 'Msun')
    pp.save(_dir_name)
    pp1 = yt.PhasePlot(ad, ('gas', 'density'), ('gas', 'temperature'),
                       ('gas', 'cooling_time'),
                       weight_field=('gas', 'cell_mass'))
    pp1.save(_dir_name)

    compare_filename = os.path.join(test_data_dir, filename)
    if generate_answers:
        os.rename(save_filename, compare_filename)
        return

    # do the comparison
    ds_comp = yt.load(compare_filename)

    # assert equality to 8 decimals
    assert_rel_equal(data[('data', 'density')],
                     ds_comp.data[('data', 'density')], 8)
    assert_rel_equal(data[('data', 'temperature')],
                     ds_comp.data[('data', 'temperature')], 8)
    assert_rel_equal(data[('data', 'cooling_time')],
                     ds_comp.data[('data', 'cooling_time')], 8)
    assert_rel_equal(data[('data', 'cell_mass')],
                     ds_comp.data[('data', 'cell_mass')], 8)
Example #5
def test_max_density_halo_quantities():
    ds = yt.load(os.path.join(_dir_name, 'RD0009/RD0009'))

    # Find the point of maximum density, center a sphere of radius
    # 1 Mpc around it, and sum the masses inside
    val, pos = ds.find_max('Density')
    sp = ds.sphere(pos, (1000., 'kpc'))
    ct = sp['creation_time']
    dm = (ct < 0)
    dm_mass = np.sum(sp['particle_mass'][dm]).in_units('Msun')
    gas_mass = np.sum(sp['cell_mass'].in_units('Msun'))

    # Also look at the radial profiles of density and temperature
    # within these spheres. The bin size is chosen to make the profiles
    # smooth and for each bin to be larger than the cell size.
    ptest0 = yt.create_profile(sp, "radius", "density", n_bins=[20])
    ptest1 = yt.create_profile(sp, "radius", "temperature", n_bins=[20])

    # Save the quantities to be compared
    data = {
        "dm_mass": dm_mass,
        "gas_mass": gas_mass,
        "max_position": pos,
        "density_profile": ptest0['density'],
        "temperature_profile": ptest1['temperature']
    }

    # save your results file
    filename = "max_density_halo_quantities.h5"
    save_filename = os.path.join(_dir_name, filename)
    yt.save_as_dataset(ds, save_filename, data)

    compare_filename = os.path.join(test_data_dir, filename)
    if generate_answers:
        os.rename(save_filename, compare_filename)
        return

    ds_comp = yt.load(compare_filename)
    assert_rel_equal(data["dm_mass"], ds_comp.data["dm_mass"], tolerance)
    assert_rel_equal(data["gas_mass"], ds_comp.data["gas_mass"], tolerance)
    assert_rel_equal(data["max_position"], ds_comp.data["max_position"],
                     tolerance)
    assert_rel_equal(data["density_profile"], ds_comp.data["density_profile"],
                     tolerance)
    assert_rel_equal(data["temperature_profile"],
                     ds_comp.data["temperature_profile"], tolerance)
Example #6
    def test_unequal_bin_field_profile(self):
        density = np.random.random(128)
        temperature = np.random.random(127)
        cell_mass = np.random.random((128, 128))

        my_data = {
            "density": density,
            "temperature": temperature,
            "cell_mass": cell_mass}
        fake_ds_med = {"current_time": yt.YTQuantity(10, "Myr")}
        yt.save_as_dataset(fake_ds_med, "mydata.h5", my_data)

        ds = yt.load('mydata.h5')

        assert_raises(
            YTProfileDataShape,
            yt.PhasePlot, ds.data, 'temperature', 'density', 'cell_mass')
Example #7
def test_hmf():
    es = sim_dir_load(_pf_name, path=_dir_name)
    es.get_time_series()
    ds = es[-1]
    hc = HaloCatalog(data_ds=ds,
                     finder_method='fof',
                     output_dir=os.path.join(_dir_name,
                                             "halo_catalogs/catalog"))
    hc.create()
    masses = hc.data_source['particle_mass'].in_units('Msun')
    h = ds.hubble_constant
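    # build the cumulative halo mass function: log halo masses vs. cumulative
    # number density per comoving volume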
    mtot = np.log10(masses * 1.2) - np.log10(h)
    masses_sim = np.sort(mtot)
    sim_volume = ds.domain_width.in_units('Mpccm').prod()
    n_cumulative_sim = np.arange(len(mtot), 0, -1)
    masses_sim, unique_indices = np.unique(masses_sim, return_index=True)

    n_cumulative_sim = n_cumulative_sim[unique_indices] / sim_volume
    filename = 'hmf.h5'
    save_filename = os.path.join(_dir_name, filename)
    data = {'masses': masses_sim, 'n_sim': n_cumulative_sim}
    yt.save_as_dataset(ds, save_filename, data)

    # make a plot
    fig = plt.figure(figsize=(8, 8))
    plt.semilogy(masses_sim, n_cumulative_sim, '-')
    plt.ylabel(r'Cumulative Halo Number Density $\mathrm{Mpc}^{-3}$',
               fontsize=16)
    plt.xlabel(r'log Mass/$\mathrm{M}_{\odot}$', fontsize=16)
    plt.tick_params(labelsize=16)
    plt.savefig(os.path.join(_dir_name, 'hmf.png'), format='png')

    compare_filename = os.path.join(test_data_dir, filename)
    if generate_answers:
        os.rename(save_filename, compare_filename)
        return

    # do the comparison
    ds_comp = yt.load(compare_filename)

    # assert equality to 8 decimals
    assert_rel_equal(data['masses'], ds_comp.data['masses'], 8)
    assert_rel_equal(data['n_sim'], ds_comp.data['n_sim'], 8)
Example #8
def test_output_number():
    ds = yt.load(os.path.join(_dir_name, 'DD0000/DD0000'))

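    # count the DD and RD output directories present on disk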
    DDnum = len(glob.glob(os.path.join(_dir_name, 'DD????/DD????')))
    RDnum = len(glob.glob(os.path.join(_dir_name, 'RD????/RD????')))

    output_data = {'number_of_files': np.array([DDnum, RDnum])}

    filename = "outputnum_results.h5"
    save_filename = os.path.join(_dir_name, filename)
    yt.save_as_dataset(ds, save_filename, output_data)

    compare_filename = os.path.join(test_data_dir, filename)
    if generate_answers:
        os.rename(save_filename, compare_filename)
        return

    ds_comp = yt.load(compare_filename)
    assert_equal(output_data['number_of_files'],
                 ds_comp.data['number_of_files'])
Example #9
def save_simulation(es, filename=None):
    def to_arr(my_list):
        if hasattr(my_list[0], "units"):
            f = es.arr
        else:
            f = np.array
        return f(my_list)
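    # record the filename and time of every output; cosmological runs also store redshift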
    fields = ["filename", "time"]
    ex_keys = ["box_size", "initial_time", "final_time"]
    if es.cosmological_simulation:
        fields.append("redshift")
        ex_keys.extend(["initial_redshift", "final_redshift"])
    data = dict((field, to_arr([d[field] for d in es.all_outputs]))
                for field in fields)
    for i in range(data["filename"].size):
        if data["filename"][i].startswith("./"):
            data["filename"][i] = data["filename"][i][2:]
    if filename is None:
        filename = str(es)
        filename = filename[:filename.rfind(".")] + ".h5"
    extra_attrs = dict((field, getattr(es, field))
                       for field in ex_keys)
    yt.save_as_dataset(es, filename, data, extra_attrs=extra_attrs)
Example #10
        def wrapper(*args):
            # name the file after the function
            filename = "%s.h5" % func.__name__
            result_filename = os.path.join(test_results_dir, filename)

            # check that answers exist
            if not generate_results:
                assert os.path.exists(result_filename), \
                  "Result file, %s, not found!" % result_filename

            data = func(*args)
            fn = yt.save_as_dataset({}, filename=filename, data=data)

            # if generating, move files to results dir
            if generate_results:
                os.rename(filename, result_filename)
            # if comparing, run the comparison
            else:
                ytdataset_compare(
                    filename, result_filename,
                    compare_func=compare_func, **kwargs)
                            bhinfo["dt"].append(
                                es.data["time"][min(i + 1, fns.size - 1)] -
                                es.data["time"][i])
                            print("In clump %s (cell %d): %d." %
                                  (str(my_clump), d.argmin(), bhids[ib]))
                            break

                clump_dmin.append(min_sep.to("pc"))
            # save dataset with bh-clump distances
            if cfn == os.path.join(data_dir,
                                   "%s/RD0077/halo_2170858_clumps.h5"):
                distance_fn = "%s_distances.h5" % cfn[:-3]
                distance_data = \
                  {"particle_index": bhids,
                   "clump_distances": ds.arr(clump_dmin)}
                yt.save_as_dataset(ds, distance_fn, distance_data)
            my_dmin.extend(clump_dmin)
            del ds
            pbar.update(1)
        pbar.finish()

        sto.result_id = i
        if my_dmin:
            my_dmin = yt.YTArray(my_dmin)
            print(my_dmin.min(), my_dmin.max())

            sto.result = [contained, get_distribution(my_dmin)]

    if yt.is_root():
        ids = []
        all_dmin = []


def find_stars(ds, filename, min_level=4):
    fields = [
        "particle_mass", "particle_index", "particle_type",
        "particle_position_x", "particle_position_y", "particle_position_z",
        "particle_velocity_x", "particle_velocity_y", "particle_velocity_z",
        "creation_time", "metallicity_fraction"
    ]
    bfields = [
        "density", "temperature", "sound_speed", "velocity_x", "velocity_y",
        "velocity_z", "dx"
    ]
    data = defaultdict(list)

    Zcr = ds.parameters['PopIIIMetalCriticalFraction']

    ns = 0
    if yt.is_root():
        pbar = yt.get_pbar("Reading grids", ds.index.grids.size, parallel=True)
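    # loop over grids at or above min_level, selecting star particles formed
    # below the critical metal fraction and within the given mass windows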
    for i, grid in enumerate(ds.index.grids):
        if ds.index.grid_levels[i][0] >= min_level:
            ct = grid["creation_time"]
            stars = (ct > 0)
            if not stars.any():
                grid.clear_data()
                continue

            Zfr = grid["metallicity_fraction"]
            stars &= (Zfr < Zcr)
            if not stars.any():
                grid.clear_data()
                continue

            pt = grid["particle_type"]
            stars &= ((pt == 1) | (pt == 5))
            if not stars.any():
                grid.clear_data()
                continue

            # mass is multiplied by 1e-20 when main-sequence lifetime is over
            mass = grid["particle_mass"].to("Msun")
            mass[mass < 1e-9] *= 1e20
            stars &= (((mass >= 25) & (mass <= 140)) | (mass >= 260))
            if stars.any():
                ns += stars.sum()
                for field in fields:
                    data[field].append(grid[field][stars])
            grid.clear_data()
        if yt.is_root():
            pbar.update(i)
    if yt.is_root():
        pbar.finish()

    ndata = {}
    if len(data["particle_mass"]) > 0:
        for field in fields:
            a = ds.arr(np.empty(ns), data[field][0].units)
            ip = 0
            for chunk in data[field]:
                a[ip:ip + chunk.size] = chunk
                ip += chunk.size
            ndata[field] = a

        yt.mylog.info("Getting %d point field values for %s." %
                      (ndata["particle_mass"].size, str(ds)))

        p = ds.arr([ndata["particle_position_%s" % ax] for ax in "xyz"])
        p = np.rollaxis(p, 1)
        bdatal = ds.find_field_values_at_points(bfields, p)
        bdata = dict((field, bdatal[i]) for i, field in enumerate(bfields))
        ndata.update(bdata)

    con_args = ["center", "left_edge", "right_edge"]
    extra_attrs = dict(
        (field, getattr(ds, "domain_%s" % field)) for field in con_args)
    extra_attrs["con_args"] = con_args
    extra_attrs["data_type"] = "yt_data_container"
    extra_attrs["container_type"] = "region"
    extra_attrs["dimensionality"] = 3
    ftypes = dict((field, "star") for field in fields + bfields)
    yt.save_as_dataset(ds,
                       filename,
                       ndata,
                       field_types=ftypes,
                       extra_attrs=extra_attrs)
Example #13
if __name__ == "__main__":
    es = yt.load(sys.argv[1])
    fns = es.data['filename'].astype(str)[::-1]
    data_dir = es.directory

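    # process each snapshot in parallel, skipping those already written to disk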
    for fn in yt.parallel_objects(fns, njobs=-1, dynamic=True):
        ds = yt.load(os.path.join(data_dir, fn))

        output_file = os.path.join("pop3", f"{ds.basename}.h5")
        if os.path.exists(output_file):
            continue

        add_p2p_particle_filters(ds)
        region = ds.box(ds.parameters["RefineRegionLeftEdge"],
                        ds.parameters["RefineRegionRightEdge"])

        fields = [
            "particle_mass", "particle_index", "particle_type",
            "particle_position_x", "particle_position_y",
            "particle_position_z", "particle_velocity_x",
            "particle_velocity_y", "particle_velocity_z", "creation_time",
            "metallicity_fraction"
        ]
        data = dict((field, region[('pop3', field)]) for field in fields)
        ftypes = dict((field, 'pop3') for field in fields)

        yt.save_as_dataset(ds,
                           filename=output_file,
                           data=data,
                           field_types=ftypes)
Example #14
from collections import defaultdict
import numpy as np
import sys
import yt

if __name__ == "__main__":
    with open('pfs.dat', 'r') as f:
        fns = [line.strip() for line in f]

    parameters = ['RefineRegionLeftEdge', 'RefineRegionRightEdge']

    data = defaultdict(list)
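    # collect the time, redshift, and refine-region bounds of every dataset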
    for fn in fns:
        ds = yt.load(fn)
        data['filename'].append(fn)
        data['time'].append(ds.current_time.to('Myr'))
        data['redshift'].append(ds.current_redshift)
        for par in parameters:
            data[par].append(ds.parameters[par])

    data['filename'] = np.array(data['filename'])
    data['time'] = ds.arr(data['time'])
    data['redshift'] = np.array(data['redshift'])
    data['RefineRegionLeftEdge'] = ds.arr(data['RefineRegionLeftEdge'],
                                          'unitary')
    data['RefineRegionRightEdge'] = ds.arr(data['RefineRegionRightEdge'],
                                           'unitary')

    yt.save_as_dataset(ds, filename='simulation.h5', data=data)
Example #15
    if os.environ.get("METAL_COOLING", "0") == "1":
        plots.extend(
            pyplot.loglog(data["density"],
                          data["dust_temperature"],
                          color="black",
                          linestyle="--",
                          label="T$_{dust}$"))
    pyplot.xlabel("$\\rho$ [g/cm$^{3}$]")
    pyplot.ylabel("T [K]")

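    # plot the H2 mass fraction on a second y-axis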
    pyplot.twinx()
    plots.extend(
        pyplot.loglog(data["density"],
                      data["H2I"] / data["density"],
                      color="red",
                      label="f$_{H2}$"))
    pyplot.ylabel("H$_{2}$ fraction")
    pyplot.legend(plots, [plot.get_label() for plot in plots],
                  loc="lower right")

    if os.environ.get("METAL_COOLING", "0") == "1":
        output = "freefall_metal"
    else:
        output = "freefall"

    pyplot.tight_layout()
    pyplot.savefig("%s.png" % output)

    # save data arrays as a yt dataset
    yt.save_as_dataset({}, "%s.h5" % output, data)
Example #16
    # timestepping safety factor
    safety_factor = 0.01

    # let gas cool at constant density
    data = evolve_constant_density(fc,
                                   final_time=final_time,
                                   safety_factor=safety_factor)

    p1, = pyplot.loglog(data["time"].to("Myr"),
                        data["temperature"],
                        color="black",
                        label="T")
    pyplot.xlabel("Time [Myr]")
    pyplot.ylabel("T [K]")

    data["mu"] = data["temperature"] / \
        (data["energy"] * (my_chemistry.Gamma - 1.) *
         fc.chemistry_data.temperature_units)
    pyplot.twinx()
    p2, = pyplot.semilogx(data["time"].to("Myr"),
                          data["mu"],
                          color="red",
                          label="$\\mu$")
    pyplot.ylabel("$\\mu$")
    pyplot.legend([p1, p2], ["T", "$\\mu$"], fancybox=True, loc="center left")
    pyplot.savefig("cooling_cell.png")

    # save data arrays as a yt dataset
    yt.save_as_dataset({}, "cooling_cell.h5", data)


def cooling_cell(density=12.2,
                 initial_temperature=2.0E4,
                 final_time=30.0,
                 metal_fraction=4.0E-4,
                 make_plot=False,
                 save_output=False,
                 primordial_chemistry=2,
                 outname=None,
                 save_H2_fraction=False,
                 return_result=False,
                 verbose=False,
                 H2_converge=None,
                 *args,
                 **kwargs):

    current_redshift = 0.

    # Set solver parameters
    my_chemistry = chemistry_data()
    my_chemistry.use_grackle = 1
    my_chemistry.with_radiative_cooling = 1
    my_chemistry.primordial_chemistry = primordial_chemistry
    my_chemistry.metal_cooling = 1
    my_chemistry.UVbackground = 1
    my_chemistry.self_shielding_method = 3

    if primordial_chemistry > 1:
        my_chemistry.H2_self_shielding = 2
        my_chemistry.h2_on_dust = 1
        my_chemistry.three_body_rate = 4

    grackle_dir = "/home/aemerick/code/grackle-emerick/"
    my_chemistry.grackle_data_file = os.sep.join(  #['/home/aemerick/code/grackle-emerick/input/CloudyData_UVB=HM2012.h5'])
        [grackle_dir, "input", "CloudyData_UVB=HM2012_shielded.h5"])

    # set the factors
    my_chemistry.LW_factor = kwargs.get("LW_factor", 1.0)
    my_chemistry.k27_factor = kwargs.get("k27_factor", 1.0)
    #if 'LW_factor' in kwargs.keys():
    #    my_chemistry.LW_factor = kwargs['LW_factor']
    #else:
    #    my_chemistry.LW_factor = 1.0

    #if 'k27_factor' in kwargs.keys():
    #    my_chemistry.k27_factor = kwargs['k27_factor']
    #else:
    #    my_chemistry.k27_factor = 1.0

    # Set units
    my_chemistry.comoving_coordinates = 0  # proper units
    my_chemistry.a_units = 1.0
    my_chemistry.a_value = 1. / (1. + current_redshift) / \
        my_chemistry.a_units
    my_chemistry.density_units = mass_hydrogen_cgs  # rho = 1.0 is 1.67e-24 g
    my_chemistry.length_units = cm_per_mpc  # 1 Mpc in cm
    my_chemistry.time_units = sec_per_Myr  # 1 Myr in s
    my_chemistry.velocity_units = my_chemistry.a_units * \
        (my_chemistry.length_units / my_chemistry.a_value) / \
        my_chemistry.time_units

    rval = my_chemistry.initialize()

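    # create a single-cell fluid container and initialize the species densities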
    fc = FluidContainer(my_chemistry, 1)
    fc["density"][:] = density
    if my_chemistry.primordial_chemistry > 0:
        fc["HI"][:] = 0.76 * fc["density"]
        fc["HII"][:] = tiny_number * fc["density"]
        fc["HeI"][:] = (1.0 - 0.76) * fc["density"]
        fc["HeII"][:] = tiny_number * fc["density"]
        fc["HeIII"][:] = tiny_number * fc["density"]
    if my_chemistry.primordial_chemistry > 1:
        fc["H2I"][:] = tiny_number * fc["density"]
        fc["H2II"][:] = tiny_number * fc["density"]
        fc["HM"][:] = tiny_number * fc["density"]
        fc["de"][:] = tiny_number * fc["density"]
        fc['H2_self_shielding_length'][:] = 1.8E-6
    if my_chemistry.primordial_chemistry > 2:
        fc["DI"][:] = 2.0 * 3.4e-5 * fc["density"]
        fc["DII"][:] = tiny_number * fc["density"]
        fc["HDI"][:] = tiny_number * fc["density"]
    if my_chemistry.metal_cooling == 1:
        fc["metal"][:] = metal_fraction * fc["density"] * \
          my_chemistry.SolarMetalFractionByMass

    fc["x-velocity"][:] = 0.0
    fc["y-velocity"][:] = 0.0
    fc["z-velocity"][:] = 0.0

    fc["energy"][:] = initial_temperature / \
        fc.chemistry_data.temperature_units
    fc.calculate_temperature()
    fc["energy"][:] *= initial_temperature / fc["temperature"]

    # timestepping safety factor
    safety_factor = 0.001

    # let gas cool at constant density

    #if verbose:
    print("Beginning Run")
    data = evolve_constant_density(fc,
                                   final_time=final_time,
                                   H2_converge=H2_converge,
                                   safety_factor=safety_factor,
                                   verbose=verbose)
    #else:
    #    print "Beginning Run"

    #    with NoStdStreams():
    #        data = evolve_constant_density(
    #            fc, final_time=final_time, H2_converge = 1.0E-6,
    #            safety_factor=safety_factor)
    #    print "Ending Run"

    if make_plot:
        p1, = plt.loglog(data["time"].to("Myr"),
                         data["temperature"],
                         color="black",
                         label="T")
        plt.xlabel("Time [Myr]")
        plt.ylabel("T [K]")

        data["mu"] = data["temperature"] / \
            (data["energy"] * (my_chemistry.Gamma - 1.) *
             fc.chemistry_data.temperature_units)
        plt.twinx()
        p2, = plt.semilogx(data["time"].to("Myr"),
                           data["mu"],
                           color="red",
                           label="$\\mu$")
        plt.ylabel("$\\mu$")
        plt.legend([p1, p2], ["T", "$\\mu$"], fancybox=True, loc="center left")
        plt.savefig("cooling_cell.png")

    # save data arrays as a yt dataset
    if outname is None:
        outname = 'cooling_cell_%.2f_%.2f' % (my_chemistry.k27_factor,
                                              my_chemistry.LW_factor)

    if save_output:

        yt.save_as_dataset({}, outname + '.h5', data)

    if my_chemistry.primordial_chemistry > 1:
        H2_fraction = (data['H2I'] + data['H2II']) / data['density']
    else:
        H2_fraction = np.zeros(np.size(data['density']))

    if save_H2_fraction:
        #np.savetxt(outname + ".dat", [data['time'], H2_fraction])

        f = open("all_runs_d_%.2f.dat" % (density), "a")
        #        f.write("# k27 LW f_H2 T time\n")
        f.write("%8.8E %8.8E %8.8E %8.8E %8.8E \n" %
                (my_chemistry.k27_factor, my_chemistry.LW_factor,
                 H2_fraction[-1], data['temperature'][-1], data['time'][-1]))
        f.close()

    if return_result:
        return data
    else:
        return
Example #18
def make_onezone_ray(density=1e-26,
                     temperature=1000,
                     metallicity=0.3,
                     length=10,
                     redshift=0,
                     filename='ray.h5',
                     column_densities=None):
    """
    Create a one-zone ray object for use as test data.  The ray
    consists of a single absorber of hydrodynamic characteristics 
    specified in the function kwargs.  It makes an excellent test dataset 
    to test Trident's capabilities for making absorption spectra.

    You can specify the column densities of different ions explicitly using
    the column_densities keyword, or you can let Trident calculate the 
    different ion columns internally from the density, temperature, and 
    metallicity fields.

    Using the defaults will produce a ray that should result in a spectrum 
    with a good number of absorption features.

    **Parameters**

    :density: float, optional

        The gas density value of the ray in g/cm**3
        Default: 1e-26

    :temperature: float, optional

        The gas temperature value of the ray in K
        Default: 10**3

    :metallicity: float, optional

        The gas metallicity value of the ray in Zsun
        Default: 0.3

    :length: float, optional

        The length of the ray in kpc
        Default: 10.

    :redshift: float, optional

        The redshift of the ray
        Default: 0

    :filename: string, optional

        The filename to which to save the ray to disk.  Due to the 
        mechanism for passing rays, the ray data must be saved to disk.
        Default: 'ray.h5'

    :column_densities: dict, optional

        The user can create a dictionary which adds more number density ion 
        fields to the ray.  Each key in the dictionary should be the desired 
        ion field name according to the field name format:
        i.e.  "<ELEMENT>_p<IONSTATE>_number_density" 
        e.g. neutral hydrogen = "H_p0_number_density".  
        The corresponding value for each key should be the desired column 
        density of that ion in cm**-2.  See example below.
        Default: None

    **Returns**

        A YT LightRay object

    **Example**

    Create a one-zone ray, and generate a COS spectrum from that ray.

    >>> import trident
    >>> ray = trident.make_onezone_ray()
    >>> sg = trident.SpectrumGenerator('COS')
    >>> sg.make_spectrum(ray)
    >>> sg.plot_spectrum('spec_raw.png')

    Create a one-zone ray with an HI column density of 1e21 (DLA) and generate
    a COS spectrum from that ray for just the Lyman alpha line.

    >>> import trident
    >>> ray = trident.make_onezone_ray(column_densities={'H_number_density': 1e21})
    >>> sg = trident.SpectrumGenerator('COS')
    >>> sg.make_spectrum(ray, lines=['Ly a'])
    >>> sg.plot_spectrum('spec_raw.png')
    """
    from yt import save_as_dataset
    length = YTArray([length], "kpc")
    data = {
        "density": YTArray([density], "g/cm**3"),
        "metallicity": YTArray([metallicity], "Zsun"),
        "dl": length,
        "temperature": YTArray([temperature], "K"),
        "redshift": np.array([redshift]),
        "redshift_eff": np.array([redshift]),
        "velocity_los": YTArray([0.], "cm/s"),
        "x": length / 2,
        "dx": length,
        "y": length / 2,
        "dy": length,
        "z": length / 2,
        "dz": length
    }

    extra_attrs = {"data_type": "yt_light_ray", "dimensionality": 3}
    field_types = dict([(field, "grid") for field in data.keys()])

    # Add additional number_density fields to dataset
    if column_densities:
        for k, v in six.iteritems(column_densities):
            # Assure we add X_number_density for neutral ions
            # instead of X_p0_number_density
            key_string_list = k.split('_')
            if key_string_list[1] == 'p0':
                k = '_'.join([
                    key_string_list[0], key_string_list[2], key_string_list[3]
                ])
            v = YTArray([v], 'cm**-2')
            data[k] = v / length
            field_types[k] = 'grid'

    ds = {
        "current_time": 0.,
        "current_redshift": 0.,
        "cosmological_simulation": 0.,
        "domain_left_edge": np.zeros(3) * length,
        "domain_right_edge": np.ones(3) * length,
        "periodicity": [True] * 3
    }

    save_as_dataset(ds,
                    filename,
                    data,
                    field_types=field_types,
                    extra_attrs=extra_attrs)

    # load dataset and make spectrum
    ray = load(filename)
    return ray
Example #19
    cooling_rate = fc.chemistry_data.cooling_units * fc["energy"] / \
        np.abs(fc["cooling_time"]) / density_proper

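    # sort every field by temperature and attach CGS units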
    data = {}
    t_sort = np.argsort(fc["temperature"])
    for field in fc.density_fields:
        data[field] = yt.YTArray(
            fc[field][t_sort] * my_chemistry.density_units, "g/cm**3")
    data["energy"] = yt.YTArray(
        fc["energy"][t_sort] * my_chemistry.energy_units, "erg/g")
    data["temperature"] = yt.YTArray(fc["temperature"][t_sort], "K")
    data["pressure"] = yt.YTArray(
        fc["pressure"][t_sort] * my_chemistry.pressure_units, "dyne/cm**2")
    data["cooling_time"] = yt.YTArray(fc["cooling_time"][t_sort], "s")
    data["cooling_rate"] = yt.YTArray(cooling_rate[t_sort], "erg*cm**3/s")

    pyplot.loglog(data["temperature"], data["cooling_rate"], color="black")
    pyplot.xlabel('T [K]')
    pyplot.ylabel('$\\Lambda$ [erg s$^{-1}$ cm$^{3}$]')

    # save data arrays as a yt dataset
    if 'PRIMORDIAL_CHEM' in os.environ:
        ds_name = 'cooling_rate.pc%s.h5' % os.environ['PRIMORDIAL_CHEM']
        im_name = 'cooling_rate.pc%s.png' % os.environ['PRIMORDIAL_CHEM']
    else:
        ds_name = 'cooling_rate.h5'
        im_name = 'cooling_rate.png'
    pyplot.tight_layout()
    pyplot.savefig(im_name)
    yt.save_as_dataset({}, ds_name, data)
Example #20
            black_holes[pid]["time"].append(ds.current_time)
            mbh[i] = black_holes[pid]["mass"][-1]

        mdot = bondi_hoyle_accretion_rate(ds.r, mbh)

        for i, pid in enumerate(pids):
            if pid not in black_holes:
                continue
            black_holes[pid]["mdot"].append(mdot[i])
            black_holes[pid]["density"].append(ds.r["density"][i])
            black_holes[pid]["temperature"].append(ds.r["temperature"][i])

    for pid in black_holes:
        if black_holes[pid]["time"][-1] >= ds.current_time:
            continue
        t_el = ds.current_time - black_holes[pid]["time"][-1]
        m_new = black_holes[pid]["mass"][-1] + \
          black_holes[pid]["mdot"][-1] * t_el
        black_holes[pid]["mass"].append(m_new)
        black_holes[pid]["time"].append(ds.current_time)

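    # flatten the per-black-hole histories into arrays keyed by particle id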
    data = {}
    for bh in black_holes:
        for field in black_holes[bh]:
            key = ("p_%d_%s" % (bh, field))
            data[key] = es.arr(black_holes[bh][field])

    ftypes = dict((field, "star") for field in data)
    yt.save_as_dataset(es, os.path.join(data_dir, "black_holes_bh.h5"),
                       data, field_types=ftypes)