Example #1
    def setup_particle_ics(self, regenerate_particles=False, prng=None):
        r"""
        From a set of cluster models and their relative positions and
        velocities, set up initial conditions for use with SPH codes.

        This routine will either generate a single cluster or will combine
        two or three clusters together. If more than one cluster is
        generated, the gas particles will have their densities set by
        adding the densities from the overlapping clusters together, and
        will have their thermal energies and velocities set by
        mass-weighting them from the overlapping profiles.

        Parameters
        ----------
        regenerate_particles : boolean, optional
            If particle files have already been created and this flag
            is set to True, the particles will be re-created.
            Default: False
        prng : :class:`~numpy.random.RandomState` object, integer, or None
            A pseudo-random number generator, used to make the particle
            sampling reproducible. Default: None
        """
        profiles = [ClusterModel.from_h5_file(hf) for hf in self.profiles]
        parts = self._generate_particles(
            regenerate_particles=regenerate_particles, prng=prng)
        if self.num_halos == 1:
            all_parts = parts[0]
            all_parts.add_offsets(self.center[0], self.velocity[0])
        elif self.num_halos == 2:
            all_parts = combine_two_clusters(parts[0], parts[1], profiles[0],
                                             profiles[1], self.center[0],
                                             self.center[1], self.velocity[0],
                                             self.velocity[1])
        else:
            all_parts = combine_three_clusters(
                parts[0], parts[1], parts[2], profiles[0], profiles[1],
                profiles[2], self.center[0], self.center[1], self.center[2],
                self.velocity[0], self.velocity[1], self.velocity[2])
        return all_parts
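
A minimal usage sketch (an assumption, not part of the example): ics is taken to be an already-constructed ClusterICs instance whose profiles, centers, velocities, and particle counts were set up elsewhere; the seed and output filename are illustrative only.

import numpy as np

# Hypothetical usage; ics is an assumed, pre-built ClusterICs instance.
prng = np.random.RandomState(24)           # fixed seed for reproducible sampling
parts = ics.setup_particle_ics(prng=prng)  # combine the halos into one particle set
parts.write_particles("merger_particles.h5", overwrite=True)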
Example #2
    def _generate_particles(self, regenerate_particles=False, prng=None):
        prng = parse_prng(prng)
        parts = []
        for i, pf in enumerate(self.profiles):
            if regenerate_particles or self.particle_files[i] is None:
                # Sample new particle sets from the model profiles
                m = ClusterModel.from_h5_file(pf)
                p = m.generate_dm_particles(
                    self.num_particles["dm"][i], r_max=self.r_max, prng=prng)
                if self.num_particles["star"][i] > 0:
                    sp = m.generate_star_particles(
                        self.num_particles["star"][i], r_max=self.r_max,
                        prng=prng)
                    p = p + sp
                if self.num_particles["gas"][i] > 0:
                    gp = m.generate_gas_particles(
                        self.num_particles["gas"][i], r_max=self.r_max,
                        prng=prng)
                    p = p + gp
                parts.append(p)
                # Cache the particles on disk for reuse on later calls
                outfile = f"{self.basename}_{i}_particles.h5"
                p.write_particles(outfile, overwrite=True)
                self.particle_files[i] = outfile
            else:
                # Reuse the particle file written on a previous call
                p = ClusterParticles.from_file(self.particle_files[i])
                parts.append(p)
        return parts
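
The caching behavior above can be summarized with a short sketch, again assuming an existing ics instance: the first call samples particles and records the written file names in ics.particle_files, and later calls reuse those files unless regeneration is forced.

# Sketch only; ics is an assumed ClusterICs instance.
parts = ics._generate_particles()                           # samples and writes <basename>_<i>_particles.h5
parts = ics._generate_particles()                           # reuses the cached particle files
parts = ics._generate_particles(regenerate_particles=True)  # forces a fresh sampling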
Example #3
def model_answer_testing(model, filename, answer_store, answer_dir):
    p = Path(answer_dir) / filename
    if answer_store:
        model.write_model_to_h5(p, overwrite=True)
    else:
        old_model = ClusterModel.from_h5_file(p)
        for field in old_model.fields:
            assert_equal(old_model[field], model[field])
        assert_equal(old_model.dm_virial.df, model.dm_virial.df)
        assert_equal(old_model.star_virial.df, model.star_virial.df)
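
For context, a hypothetical pytest-style test could combine this helper with generate_model from Example #6 below; the answer_store and answer_dir fixtures are assumed to be provided by the test suite's conftest.

# Hypothetical test using the helper above; the fixture names are assumptions.
def test_model(answer_store, answer_dir):
    model = generate_model()  # see Example #6
    model_answer_testing(model, "model.h5", answer_store, answer_dir)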
Example #4
    def _determine_num_particles(self):
        from collections import defaultdict
        dm_masses = []
        gas_masses = []
        star_masses = []
        for pf in self.profiles:
            p = ClusterModel.from_h5_file(pf)
            idxs = p["radius"] < self.r_max
            dm_masses.append(p["dark_matter_mass"][idxs][-1].value)
            if "gas_mass" in p:
                gmass = p["gas_mass"][idxs][-1].value
            else:
                gmass = 0.0
            gas_masses.append(gmass)
            if "stellar_mass" in p:
                smass = p["stellar_mass"][idxs][-1].value
            else:
                smass = 0.0
            star_masses.append(smass)
        tot_dm_mass = np.sum(dm_masses)
        tot_gas_mass = np.sum(gas_masses)
        tot_star_mass = np.sum(star_masses)
        self.num_particles = defaultdict(list)
        for i in range(self.num_halos):
            if self.tot_np.get("dm", 0) > 0:
                ndp = np.rint(self.tot_np["dm"] * dm_masses[i] /
                              tot_dm_mass).astype("int")
            else:
                ndp = 0
            self.num_particles["dm"].append(ndp)
            if self.tot_np.get("gas", 0) > 0:
                ngp = np.rint(self.tot_np["gas"] * gas_masses[i] /
                              tot_gas_mass).astype("int")
            else:
                ngp = 0
            self.num_particles["gas"].append(ngp)
            if self.tot_np.get("star", 0) > 0:
                nsp = np.rint(self.tot_np["star"] * star_masses[i] /
                              tot_star_mass).astype("int")
            else:
                nsp = 0
            self.num_particles["star"].append(nsp)
Example #5
    def resample_particle_ics(self, parts, passive_scalars=None):
        r"""
        Given a Gadget-HDF5-like initial conditions file which has been
        output from some type of relaxation process (such as making a 
        glass or using MESHRELAX in the case of Arepo), resample the density,
        thermal energy, and velocity fields onto the gas particles/cells from
        the initial hydrostatic profiles.

        Parameters
        ----------
        parts : ClusterParticles
            The relaxed particle set whose gas fields will be resampled.
        passive_scalars : list of strings, optional
            Names of any passive scalar fields to resample as well.
            Default: None
        """
        profiles = [ClusterModel.from_h5_file(hf) for hf in self.profiles]
        if self.num_halos == 1:
            new_parts = resample_one_cluster(parts, profiles[0],
                                             self.center[0], self.velocity[0])
        elif self.num_halos == 2:
            new_parts = resample_two_clusters(parts,
                                              profiles[0],
                                              profiles[1],
                                              self.center[0],
                                              self.center[1],
                                              self.velocity[0],
                                              self.velocity[1],
                                              [self.r_max] * 2,
                                              passive_scalars=passive_scalars)
        else:
            new_parts = resample_three_clusters(
                parts,
                profiles[0],
                profiles[1],
                profiles[2],
                self.center[0],
                self.center[1],
                self.center[2],
                self.velocity[0],
                self.velocity[1],
                self.velocity[2], [self.r_max] * 3,
                passive_scalars=passive_scalars)
        return new_parts
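
A hedged usage sketch: assume the relaxed gas configuration has been read back into a ClusterParticles object (shown here with ClusterParticles.from_file purely for illustration, as in Example #2) and that ics exists; the filenames are placeholders.

relaxed = ClusterParticles.from_file("relaxed_glass_particles.h5")
new_parts = ics.resample_particle_ics(relaxed)
new_parts.write_particles("resampled_particles.h5", overwrite=True)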
Example #6
def generate_model():
    # Cluster parameters: redshift, M200 (Msun), and concentration
    z = 0.1
    M200 = 1.5e15
    conc = 4.0
    # Radius enclosing an overdensity of 200 and the sNFW scale radius
    r200 = find_overdensity_radius(M200, 200.0, z=z)
    a = r200 / conc
    # sNFW total mass, density and enclosed-mass profiles, and r500/M500
    M = snfw_total_mass(M200, r200, a)
    rhot = snfw_density_profile(M, a)
    Mt = snfw_mass_profile(M, a)
    r500, M500 = find_radius_mass(Mt, z=z, delta=500.0)
    # Gas density: Vikhlinin-type shape rescaled to a gas fraction of 0.12
    # within r500; stellar density is taken to be 2% of the total density
    f_g = 0.12
    rhog = vikhlinin_density_profile(1.0, 100.0, r200, 1.0, 0.67, 3)
    rhog = rescale_profile_by_mass(rhog, f_g * M500, r500)
    rhos = 0.02 * rhot
    # Build a hydrostatic model between 0.1 and 10000 kpc and add a
    # beta = 100 magnetic field
    rmin = 0.1
    rmax = 10000.0
    m = ClusterModel.from_dens_and_tden(rmin,
                                        rmax,
                                        rhog,
                                        rhot,
                                        stellar_density=rhos)
    m.set_magnetic_field_from_beta(100.0, gaussian=True)

    return m
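
A short sketch of how this model might be saved for later use, e.g. as one of the profile files fed to a ClusterICs object; write_model_to_h5 is the same method used in Example #3, and the filename is arbitrary.

m = generate_model()
m.write_model_to_h5("single_cluster_profile.h5", overwrite=True)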
Example #7
def setup_gamer_ics(ics, regenerate_particles=False, use_tracers=False):
    r"""

    Generate the "Input_TestProb" lines needed for use
    with the ClusterMerger setup in GAMER. If the particles
    (dark matter and potentially star) have not been 
    created yet, they will be created at this step. New profile 
    files will also be created which have all fields in CGS units
    for reading into GAMER. If a magnetic field file is present
    in the ICs, a note will be given about how it should be named
    for GAMER to use it.

    NOTE: Gas particles in the initial conditions will be interpreted
    as tracer particles.

    Parameters
    ----------
    ics : ClusterICs object
        The ClusterICs object to generate the GAMER ICs from.
    regenerate_particles : boolean, optional
        If particle files have already been created and this
        flag is set to True, the particles will be
        re-created. Default: False
    use_tracers : boolean, optional
        Set to True to include the gas particles, which will be
        interpreted as tracer particles. Default: False
    """
    gamer_ptypes = ["dm", "star"]
    if use_tracers:
        gamer_ptypes.insert(0, "gas")
    gamer_ptype_num = {"gas": 0, "dm": 2, "star": 3}
    hses = [ClusterModel.from_h5_file(hf) for hf in ics.profiles]
    parts = ics._generate_particles(
        regenerate_particles=regenerate_particles)
    outlines = [
        f"Merger_Coll_NumHalos\t\t{ics.num_halos}\t# number of halos"
    ]
    for i in range(ics.num_halos):
        particle_file = f"{ics.basename}_gamerp_{i+1}.h5"
        parts[i].write_sim_input(particle_file, gamer_ptypes, gamer_ptype_num)
        hse_file_gamer = ics.profiles[i].replace(".h5", "_gamer.h5")
        hses[i].write_model_to_h5(hse_file_gamer, overwrite=True,
                                  in_cgs=True, r_max=ics.r_max)
        vel = ics.velocity[i].to_value("km/s")
        outlines += [
            f"Merger_File_Prof{i+1}\t\t{hse_file_gamer}\t# profile table of cluster {i+1}",
            f"Merger_File_Par{i+1}\t\t{particle_file}\t# particle file of cluster {i+1}",
            f"Merger_Coll_PosX{i+1}\t\t{ics.center[i][0].v}\t# X-center of cluster {i+1} in kpc",
            f"Merger_Coll_PosY{i+1}\t\t{ics.center[i][1].v}\t# Y-center of cluster {i+1} in kpc",
            f"Merger_Coll_VelX{i+1}\t\t{vel[0]}\t# X-velocity of cluster {i+1} in km/s",
            f"Merger_Coll_VelY{i+1}\t\t{vel[1]}\t# Y-velocity of cluster {i+1} in km/s"
        ]
    mylog.info("Write the following lines to Input__TestProblem: ")
    for line in outlines:
        print(line)
    num_particles = sum([ics.tot_np[key] for key in ics.tot_np])
    mylog.info(f"In the Input__Parameter file, "
               f"set PAR__NPAR = {num_particles}.")
    if ics.mag_file is not None:
        mylog.info(f"Rename the file '{ics.mag_file}' to 'B_IC' "
                   f"and place it in the same directory as the "
                   f"Input__* files, and set OPT__INIT_BFIELD_BYFILE "
                   f"to 1 in Input__Parameter")
Example #8
    def create_dataset(self,
                       domain_dimensions,
                       box_size,
                       left_edge=None,
                       **kwargs):
        """
        Create an in-memory, uniformly gridded dataset in 3D using yt by
        placing the clusters into a box. When adding multiple clusters,
        per-volume quantities from each cluster such as density and
        pressure are added, whereas per-mass quantities such as temperature
        and velocity are mass-weighted.

        Parameters
        ----------
        domain_dimensions : 3-tuple of ints
            The number of cells on a side for the domain.
        box_size : float
            The size of the box in kpc.
        left_edge : array_like, optional
            The minimum coordinate of the box in all three dimensions,
            in kpc. Default: None, which means the left edge will
            be [0, 0, 0].
        """
        from yt.loaders import load_uniform_grid
        from scipy.interpolate import InterpolatedUnivariateSpline
        if left_edge is None:
            left_edge = np.zeros(3)
        left_edge = np.array(left_edge)
        bbox = [[left_edge[0], left_edge[0] + box_size],
                [left_edge[1], left_edge[1] + box_size],
                [left_edge[2], left_edge[2] + box_size]]
        x, y, z = np.mgrid[bbox[0][0]:bbox[0][1]:domain_dimensions[0] * 1j,
                           bbox[1][0]:bbox[1][1]:domain_dimensions[1] * 1j,
                           bbox[2][0]:bbox[2][1]:domain_dimensions[2] * 1j, ]
        fields1 = [
            "density", "pressure", "dark_matter_density",
            "stellar_density", "gravitational_potential"
        ]
        fields2 = ["temperature"]
        fields3 = ["velocity_x", "velocity_y", "velocity_z"]
        units = {
            "density": "Msun/kpc**3",
            "pressure": "Msun/kpc/Myr**2",
            "dark_matter_density": "Msun/kpc**3",
            "stellar_density": "Msun/kpc**3",
            "temperature": "K",
            "gravitational_potential": "kpc**2/Myr**2",
            "velocity_x": "kpc/Myr",
            "velocity_y": "kpc/Myr",
            "velocity_z": "kpc/Myr",
            "magnetic_field_strength": "G"
        }
        fields = fields1 + fields2
        data = {}
        for i, profile in enumerate(self.profiles):
            p = ClusterModel.from_h5_file(profile)
            xx = x - self.center.d[i][0]
            yy = y - self.center.d[i][1]
            zz = z - self.center.d[i][2]
            rr = np.sqrt(xx * xx + yy * yy + zz * zz)
            fd = InterpolatedUnivariateSpline(p["radius"].d, p["density"].d)
            for field in fields:
                if field not in p:
                    continue
                if field not in data:
                    data[field] = (np.zeros(domain_dimensions), units[field])
                f = InterpolatedUnivariateSpline(p["radius"].d, p[field].d)
                if field in fields1:
                    data[field][0] += f(rr)
                elif field in fields2:
                    data[field][0] += f(rr) * fd(rr)
            for j, field in enumerate(fields3):
                if field not in data:
                    data[field] = (np.zeros(domain_dimensions), units[field])
                # weight each velocity component by the gas density so the
                # final division by total density gives a mass-weighted value
                data[field][0] += self.velocity.d[i][j] * fd(rr)
        if "density" in data:
            for field in fields2 + fields3:
                if field in data:
                    data[field][0] /= data["density"][0]
        return load_uniform_grid(data,
                                 domain_dimensions,
                                 length_unit="kpc",
                                 bbox=bbox,
                                 mass_unit="Msun",
                                 time_unit="Myr",
                                 **kwargs)
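
A hedged usage sketch of the gridded dataset, assuming yt is installed and ics exists; the resolution, box size, and plot choice are illustrative.

import yt

# 256^3 cells covering a 14,000 kpc box; ics is an assumed ClusterICs instance.
ds = ics.create_dataset((256, 256, 256), 14000.0)
slc = yt.SlicePlot(ds, "z", ("gas", "density"))
slc.save()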