import numpy as np
from numpy import isclose

from swiftsimio import load
from swiftsimio.visualisation.smoothing_length_generation import generate_smoothing_lengths


def test_generate_smoothing_length_faster(filename):
    data = load(filename)

    smoothing_lengths = data.gas.smoothing_lengths

    # Parameters required to generate smoothing lengths
    number_of_neighbours = int(
        round(data.metadata.hydro_scheme["Kernel target N_ngb"][0]))
    kernel_eta = data.metadata.hydro_scheme["Kernel eta"][0]

    kernel_gamma = ((3.0 * number_of_neighbours) /
                    (4.0 * np.pi))**(1 / 3) / kernel_eta

    generated_smoothing_lengths = generate_smoothing_lengths(
        data.gas.coordinates,
        boxsize=data.metadata.boxsize,
        kernel_gamma=kernel_gamma,
        neighbours=number_of_neighbours,
        speedup_fac=2,
        dimension=3,
    ).to(smoothing_lengths.units)

    assert isclose(generated_smoothing_lengths.value, smoothing_lengths.value,
                   rtol=0.1).all()

    return
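
Note: the `kernel_gamma` expression above inverts the 3D neighbour-number relation. A minimal sketch of the forward relation (illustrative, not part of the original test):

import numpy as np

def n_ngb_from_gamma(kernel_gamma: float, kernel_eta: float) -> float:
    # In 3D: N_ngb = (4/3) * pi * (kernel_gamma * kernel_eta)**3
    return (4.0 / 3.0) * np.pi * (kernel_gamma * kernel_eta) ** 3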
Example #2
    def set_dm_particles(self) -> None:

        coord = self.data.read_snapshot('PartType1/Coordinates')

        smoothing_lengths = generate_smoothing_lengths(
            coord,
            self.data.snapshot_file['Header'].attrs['BoxSize'],
            kernel_gamma=1.8,
            neighbours=57,
            speedup_fac=3,
            dimension=3,
        )

        masses = np.ones_like(coord[:, 0], dtype=np.float64)
        self.dm_smoothing_lengths = smoothing_lengths
        # MassTable[1] is the fixed particle mass for PartType1 (dark matter)
        self.dm_masses = masses * self.data.snapshot_file['Header'].attrs['MassTable'][1]
Example #3
def dm_render(swio_data, region: list = None, resolution: int = 1024):
    # Generate smoothing lengths for the dark matter
    swio_data.dark_matter.smoothing_lengths = generate_smoothing_lengths(
        swio_data.dark_matter.coordinates,
        swio_data.metadata.boxsize,
        kernel_gamma=1.8,
        neighbours=57,
        speedup_fac=2,
        dimension=3,
    )
    # Project the dark matter: with project=None every particle is given
    # unit weight, so this yields a particle surface-density map
    dm_map = project_pixel_grid(
        # Note here that we pass in the dark matter dataset not the whole
        # data object, to specify what particle type we wish to visualise
        data=swio_data.dark_matter,
        boxsize=swio_data.metadata.boxsize,
        resolution=resolution,
        project=None,
        parallel=True,
        region=region)
    return dm_map
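
A hypothetical usage of `dm_render` (the snapshot path is a placeholder; the plotting pattern mirrors the figure code in the later examples):

import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
from swiftsimio import load

swio_data = load("snapshot.hdf5")  # placeholder path
dm_map = dm_render(swio_data, resolution=1024)

fig, ax = plt.subplots(figsize=(8, 8))
ax.axis("off")
ax.imshow(dm_map.T, norm=LogNorm(), cmap="inferno", origin="lower")
fig.savefig("dm_map.png")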
Example #4
    def set_dm_particles(self) -> None:

        boxsize = self.data.boxsize
        coord = self.data.subfind_particles[f'PartType1']['Coordinates']
        # Crude recentring: shift any axis whose particles spill outside
        # [0, boxsize] by half a box (not a full periodic wrap)
        for i in [0, 1, 2]:
            if np.min(coord[:, i]) < 0:
                coord[:, i] += boxsize / 2
            elif np.max(coord[:, i]) > boxsize:
                coord[:, i] -= boxsize / 2
        smoothing_lengths = generate_smoothing_lengths(
            coord,
            boxsize,
            kernel_gamma=1.8,
            neighbours=57,
            speedup_fac=3,
            dimension=3,
        )

        self.data.subfind_particles['PartType1'][
            'SmoothingLength'] = smoothing_lengths
        masses = np.ones_like(coord[:, 0].value,
                              dtype=np.float32) * self.data.mass_DMpart
        self.data.subfind_particles['PartType1']['Mass'] = masses
        return
Example #5
from swiftsimio import load
from swiftsimio.visualisation.projection import project_pixel_grid
from swiftsimio.visualisation.smoothing_length_generation import generate_smoothing_lengths
from swiftsimio.visualisation.projection import scatter_parallel

data = load("/cosma6/data/dp004/dc-borr1/swift-test-data/eagle_0037.hdf5")

# Generate smoothing lengths for the dark matter
data.dark_matter.smoothing_lengths = generate_smoothing_lengths(
    data.dark_matter.coordinates,
    data.metadata.boxsize,
    kernel_gamma=1.8,
    neighbours=57,
    speedup_fac=2,
    dimension=3,
)

# Project the dark matter mass
dm_mass = project_pixel_grid(
    # Note here that we pass in the dark matter dataset not the whole
    # data object, to specify what particle type we wish to visualise
    data=data.dark_matter,
    boxsize=data.metadata.boxsize,
    resolution=1024,
    project="masses",
    parallel=True,
    region=None)

#dm_mass = scatter_parallel(x=data.dark_matter.coordinates[:, 0], y=data.dark_matter.coordinates[:, 1], h=data.dark_matter.smoothing_lengths, m=data.dark_matter.masses, res=1024)

from matplotlib.pyplot import imsave
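
The snippet is truncated after this import; a hypothetical continuation (the filename and colour map are assumptions):

imsave("dm_mass.png", dm_mass.T, cmap="inferno", origin="lower")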
Example #6
def dm_rotation_map(halo, resolution: int = 1024, alignment: str = 'edgeon'):
    data = MacsisDataset(halo)

    # Read data
    coordinates = data.read_snapshot('PartType1/Coordinates')
    velocities = data.read_snapshot('PartType1/Velocity')

    # Remember that the largest FOF has index 1
    centre_of_potential = data.read_catalogue_subfindtab(
        'FOF/GroupCentreOfPotential')[1] / data.a
    r500_crit = data.read_catalogue_subfindtab(
        'FOF/Group_R_Crit500')[1] / data.a
    m500_crit = data.read_catalogue_subfindtab('FOF/Group_M_Crit500')[1]

    # Generate smoothing lengths for dark matter
    smoothing_lengths = generate_smoothing_lengths(
        coordinates * unyt.Mpc,
        data.read_header('BoxSize') * unyt.Mpc,
        kernel_gamma=1.897367,  # Taken from Dehnen & Aly 2012
        neighbours=57,
        speedup_fac=1,
        dimension=3,
    ).value

    # Rescale coordinates to CoP
    coordinates[:, 0] -= centre_of_potential[0]
    coordinates[:, 1] -= centre_of_potential[1]
    coordinates[:, 2] -= centre_of_potential[2]

    # Compute mean velocity inside R500
    radial_dist = np.sqrt(coordinates[:, 0]**2 + coordinates[:, 1]**2 +
                          coordinates[:, 2]**2)
    r500_mask = np.where(radial_dist < r500_crit)[0]

    mean_velocity_r500 = np.mean(velocities[r500_mask], axis=0)
    angular_momentum_r500 = np.mean(
        np.cross(coordinates[r500_mask], velocities[r500_mask]), axis=0)

    velocities_rest_frame = velocities.copy()
    velocities_rest_frame[:, 0] -= mean_velocity_r500[0]
    velocities_rest_frame[:, 1] -= mean_velocity_r500[1]
    velocities_rest_frame[:, 2] -= mean_velocity_r500[2]

    # Rotate coordinates and velocities
    coordinates_edgeon = rotate(coordinates,
                                angular_momentum_r500,
                                tilt=alignment)
    velocities_rest_frame_edgeon = rotate(velocities_rest_frame,
                                          angular_momentum_r500,
                                          tilt=alignment)

    # Rotate angular momentum vector for cross check
    angular_momentum_r500_rotated = rotate(
        angular_momentum_r500 / np.linalg.norm(angular_momentum_r500),
        angular_momentum_r500,
        tilt=alignment) * r500_crit / 2

    # Weight each particle by minus its line-of-sight velocity
    compton_y = -velocities_rest_frame_edgeon[:, 2]

    # Restrict map to 2*R500
    spatial_filter = np.where(
        (np.abs(coordinates_edgeon[:, 0]) < r500_crit)
        & (np.abs(coordinates_edgeon[:, 1]) < r500_crit)
        & (np.abs(coordinates_edgeon[:, 2]) < r500_crit))[0]

    coordinates_edgeon = coordinates_edgeon[spatial_filter]
    velocities_rest_frame_edgeon = velocities_rest_frame_edgeon[spatial_filter]
    smoothing_lengths = smoothing_lengths[spatial_filter]
    compton_y = compton_y[spatial_filter]

    # Make map using swiftsimio
    x = (coordinates_edgeon[:, 0] - coordinates_edgeon[:, 0].min()) / (
        coordinates_edgeon[:, 0].max() - coordinates_edgeon[:, 0].min())
    y = (coordinates_edgeon[:, 1] - coordinates_edgeon[:, 1].min()) / (
        coordinates_edgeon[:, 1].max() - coordinates_edgeon[:, 1].min())
    h = smoothing_lengths / (coordinates_edgeon[:, 0].max() -
                             coordinates_edgeon[:, 0].min())

    # Gather and handle coordinates to be processed
    x = np.asarray(x, dtype=np.float64)
    y = np.asarray(y, dtype=np.float64)
    m = np.asarray(compton_y, dtype=np.float32)
    h = np.asarray(h, dtype=np.float32)
    smoothed_map = scatter(x=x, y=y, m=m, h=h, res=resolution).T

    # Parse info about smoothing lengths
    smoothing_lengths_info = {
        'smoothing_lengths_mean': np.nanmean(smoothing_lengths),
        'smoothing_lengths_std': np.nanstd(smoothing_lengths),
        'smoothing_lengths_median': np.nanmedian(smoothing_lengths),
        'smoothing_lengths_max': np.nanmax(smoothing_lengths),
        'smoothing_lengths_min': np.nanmin(smoothing_lengths),
    }

    return smoothed_map / 1.e10, smoothing_lengths_info
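
The rescaling block above maps positions and smoothing lengths into the unit box that swiftsimio's low-level `scatter` expects; the same step in isolation (a sketch, not from the original):

import numpy as np

def to_unit_box(q: np.ndarray) -> np.ndarray:
    # Rescale one coordinate column to [0, 1]
    return (q - q.min()) / (q.max() - q.min())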
Example #7
                                 snapshot_path,
                                 generate_extra_mask=False)

    for particle_type, cmap in particle_type_cmap.items():
        particle_data = getattr(data, particle_type)

        x = particles.x / data.metadata.a
        y = particles.y / data.metadata.a
        z = particles.z / data.metadata.a
        r_size = particles.r_size * 0.5 / data.metadata.a

        if particle_type in ["stars", "dark_matter"]:
            particle_data.smoothing_lengths = generate_smoothing_lengths(
                coordinates=particle_data.coordinates,
                boxsize=data.metadata.boxsize,
                kernel_gamma=kernel_gamma,
                neighbours=57,
                speedup_fac=1,
                dimension=3,
            )

        pixel_grid = project_pixel_grid(
            particle_data,
            boxsize=data.metadata.boxsize,
            resolution=1024,
            region=[x - r_size, x + r_size, y - r_size, y + r_size],
        )

        if particle_type == "stars":
            # Need to clip.
            nonzero = pixel_grid >= 1e-2
            min_nonzero = np.min(pixel_grid[nonzero])
Example #8
def dm_map_parent(
        run_name: str,
        velociraptor_properties_parent: str,
        snap_filepath_parent: str,
        velociraptor_properties_zoom: str,
        out_to_radius: Tuple[int, str] = (5, 'r200c'),
        highres_radius: Tuple[int, str] = (6, 'r500c'),
        output_directory: str = '.'
) -> None:
    print(f"Rendering {snap_filepath_parent}...")

    # Cross-match the parent VR catalogue using the zoom information
    with h5py.File(velociraptor_properties_zoom, 'r') as vr_file:
        idx, M200c, R200c, Xcminpot, Ycminpot, Zcminpot = find_object(
            vr_properties_catalog=velociraptor_properties_parent,
            sample_structType=10,
            sample_mass_lower_lim=vr_file['/Mass_200crit'][0] * 1e10 * 0.8,
            sample_x=vr_file['/Xcminpot'][0],
            sample_y=vr_file['/Ycminpot'][0],
            sample_z=vr_file['/Zcminpot'][0],
        )
    with h5py.File(velociraptor_properties_parent, 'r') as vr_file:
        R500c = unyt.unyt_quantity(vr_file['/SO_R_500_rhocrit'][idx], unyt.Mpc)

    M200c = unyt.unyt_quantity(M200c, unyt.Solar_Mass)
    R200c = unyt.unyt_quantity(R200c, unyt.Mpc)
    xCen = unyt.unyt_quantity(Xcminpot, unyt.Mpc)
    yCen = unyt.unyt_quantity(Ycminpot, unyt.Mpc)
    zCen = unyt.unyt_quantity(Zcminpot, unyt.Mpc)

    if out_to_radius[1] == 'r200c':
        size = out_to_radius[0] * R200c
    elif out_to_radius[1] == 'r500c':
        size = out_to_radius[0] * R500c
    elif out_to_radius[1] == 'Mpc' or out_to_radius[1] is None:
        size = unyt.unyt_quantity(out_to_radius[0], unyt.Mpc)
    else:
        raise ValueError(f"Unknown units for out_to_radius: {out_to_radius[1]}")

    if highres_radius[1] == 'r200c':
        _highres_radius = highres_radius[0] * R200c
    elif highres_radius[1] == 'r500c':
        _highres_radius = highres_radius[0] * R500c
    elif highres_radius[1] == 'Mpc' or highres_radius[1] is None:
        _highres_radius = unyt.unyt_quantity(highres_radius[0], unyt.Mpc)
    else:
        raise ValueError(f"Unknown units for highres_radius: {highres_radius[1]}")

    # Construct spatial mask to feed into swiftsimio
    mask = sw.mask(snap_filepath_parent)
    region = [
        [xCen - size, xCen + size],
        [yCen - size, yCen + size],
        [zCen - size, zCen + size]
    ]
    mask.constrain_spatial(region)
    data = sw.load(snap_filepath_parent, mask=mask)

    # Generate smoothing lengths for the dark matter
    data.dark_matter.smoothing_lengths = generate_smoothing_lengths(
        data.dark_matter.coordinates,
        data.metadata.boxsize,
        kernel_gamma=1.8,
        neighbours=57,
        speedup_fac=2,
        dimension=3,
    )

    # data.dark_matter.coordinates[:, 0] = wrap(
    #     data.dark_matter.coordinates[:, 0] - xCen,
    #     data.metadata.boxsize[0]
    # )
    # data.dark_matter.coordinates[:, 1] = wrap(
    #     data.dark_matter.coordinates[:, 1] - yCen,
    #     data.metadata.boxsize[1]
    # )
    # data.dark_matter.coordinates[:, 2] = wrap(
    #     data.dark_matter.coordinates[:, 2] - zCen,
    #     data.metadata.boxsize[2]
    # )
    
    # `resolution` comes from the enclosing module scope
    dm_mass = dm_render(
        data,
        region=[xCen - size, xCen + size, yCen - size, yCen + size],
        resolution=resolution,
    )

    # Make figure
    fig, ax = plt.subplots(figsize=(8, 8), dpi=resolution // 8)
    fig.subplots_adjust(0, 0, 1, 1)
    ax.axis("off")
    ax.imshow(dm_mass.T, norm=LogNorm(), cmap="inferno", origin="lower", extent=(region[0] + region[1]))
    info = ax.text(
        0.025,
        0.025,
        (
            f"Halo {run_name:s} DMO - parent\n"
            f"$z={data.metadata.z:3.3f}$\n"
            f"$M_{{200c}}={latex_float(M200c.value)}\\ {M200c.units.latex_repr}$\n"
            f"$R_{{200c}}={latex_float(R200c.value)}\\ {R200c.units.latex_repr}$\n"
            f"$R_\\mathrm{{clean}}={highres_radius[0]}\\ {highres_radius[1]}$"
        ),
        color="white",
        ha="left",
        va="bottom",
        alpha=0.8,
        transform=ax.transAxes,
    )
    info.set_bbox(dict(facecolor='black', alpha=0.2, edgecolor='grey'))
    ax.text(
        xCen,
        yCen + 1.05 * R200c,
        r"$R_{200c}$",
        color="black",
        ha="center",
        va="bottom"
    )
    ax.text(
        xCen,
        yCen + 1.02 * _highres_radius,
        r"$R_\mathrm{clean}$",
        color="white",
        ha="center",
        va="bottom"
    )
    circle_r200 = plt.Circle((xCen, yCen), R200c, color="black", fill=False, linestyle='-')
    circle_clean = plt.Circle((xCen, yCen), _highres_radius.value, color="white", fill=False, linestyle=':')
    ax.add_artist(circle_r200)
    ax.add_artist(circle_clean)
    ax.set_xlim([xCen.value - size.value, xCen.value + size.value])
    ax.set_ylim([yCen.value - size.value, yCen.value + size.value])
    fig.savefig(f"{output_directory}/{run_name}_dark_matter_map_parent.png")
    plt.close(fig)
    print(f"Saved: {output_directory}/{run_name}_dark_matter_map_parent.png")
    del data, dm_mass
    plt.close('all')

    return
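
The commented-out recentring above calls a `wrap` helper that is not shown; a minimal sketch, assuming a periodic box (hypothetical, not from the source):

import numpy as np

def wrap(dx, box):
    # Map displacements into [-box/2, box/2) under periodic boundaries
    return np.mod(dx + 0.5 * box, box) - 0.5 * box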
Example #9
def density_map(particle_type: int, cluster_data) -> None:

    z = cluster_data.header.subfind_particles.Redshift
    CoP = cluster_data.subfind_tab.FOF.GroupCentreOfPotential
    M200c = cluster_data.subfind_tab.FOF.Group_M_Crit200
    R200c = cluster_data.subfind_tab.FOF.Group_R_Crit200
    R500c = cluster_data.subfind_tab.FOF.Group_R_Crit500
    M500c = cluster_data.subfind_tab.FOF.Group_M_Crit500
    coord = cluster_data.subfind_particles[f'PartType{particle_type}'][
        'Coordinates']
    boxsize = cluster_data.boxsize
    DM_part_mass = cluster_data.mass_DMpart

    if particle_type == 1:

        masses = np.ones_like(coord[:, 0].value,
                              dtype=np.float32) * DM_part_mass
        # Generate DM particle smoothing lengths
        smoothing_lengths = generate_smoothing_lengths(
            coord,
            boxsize,
            kernel_gamma=1.8,
            neighbours=57,
            speedup_fac=3,
            dimension=3,
        )

    else:
        masses = cluster_data.subfind_particles[f'PartType{particle_type}'][
            'Mass']
        smoothing_lengths = cluster_data.subfind_particles[
            f'PartType{particle_type}']['SmoothingLength']

    # Run aperture filter
    read.pprint('[Check] Particle max x: ',
                np.max(np.abs(coord[:, 0] - CoP[0])), '6 x R500c: ', 6 * R500c)
    read.pprint('[Check] Particle max y: ',
                np.max(np.abs(coord[:, 1] - CoP[1])), '6 x R500c: ', 6 * R500c)
    read.pprint('[Check] Particle max z: ',
                np.max(np.abs(coord[:, 2] - CoP[2])), '6 x R500c: ', 6 * R500c)

    # Rotate particles
    # coord_rot = rotation_align_with_vector(coord.value, CoP, np.array([0, 0, 1]))
    coord_rot = coord

    # After de-rotation, create a cubic aperture filter inscribed within a
    # sphere of radius 5 * R500c centred on the CoP. Each semi-side of the
    # aperture has length 5 * R500c / sqrt(3).
    aperture = 5 * R500c / np.sqrt(3)
    mask = np.where((np.abs(coord_rot[:, 0] - CoP[0]) <= aperture)
                    & (np.abs(coord_rot[:, 1] - CoP[1]) <= aperture)
                    & (np.abs(coord_rot[:, 2] - CoP[2]) <= aperture))[0]

    # Gather and handle coordinates to be plotted
    x = coord_rot[mask, 0].value
    y = coord_rot[mask, 1].value
    x_max = np.max(x)
    x_min = np.min(x)
    y_max = np.max(y)
    y_min = np.min(y)
    x_range = x_max - x_min
    y_range = y_max - y_min

    # Test that we've got a square box
    read.pprint(x_range, y_range)

    map_input_m = np.asarray(masses.value, dtype=np.float32)
    map_input_h = np.asarray(smoothing_lengths.value, dtype=np.float32)
    mass_map = scatter(x=(x - x_min) / x_range,
                       y=(y - y_min) / y_range,
                       m=map_input_m[mask],
                       h=map_input_h[mask] / x_range,
                       res=map_resolution)
    mass_map_units = masses.units / coord.units**2

    # Mask values below 0.05 so they render as empty background
    mass_map = np.ma.masked_where(mass_map < 0.05, mass_map)
    read.pprint(mass_map)

    # Make figure
    fig, ax = plt.subplots(figsize=(6, 6), dpi=map_resolution // 6)
    ax.set_aspect('equal')
    fig.subplots_adjust(0, 0, 1, 1)
    ax.axis("off")
    ax.imshow(mass_map.T,
              norm=LogNorm(),
              cmap="inferno",
              origin="lower",
              extent=[x_min, x_max, y_min, y_max])

    t = ax.text(
        0.025,
        0.025,
        (f"Halo {cluster_id:d} {simulation_type}\n"
         f"Particles: {partType_atlas[str(particle_type)]}\n"
         f"$z={z:3.3f}$\n"
         f"$M_{{500c}}={latex_float(M500c.value)}$ M$_\odot$\n"
         f"$R_{{500c}}={latex_float(R500c.value)}$ Mpc\n"
         f"$M_{{200c}}={latex_float(M200c.value)}$ M$_\odot$\n"
         f"$R_{{200c}}={latex_float(R200c.value)}$ Mpc"),
        color="white",
        ha="left",
        va="bottom",
        transform=ax.transAxes,
    )
    t.set_bbox(dict(facecolor='black', alpha=0.2, edgecolor='none'))
    ax.text(CoP[0],
            CoP[1] + 1.02 * R500c,
            r"$R_{500c}$",
            color="white",
            ha="center",
            va="bottom")
    circle_r500 = plt.Circle((CoP[0], CoP[1]),
                             R500c,
                             color="white",
                             fill=False,
                             linestyle='-')
    ax.add_artist(circle_r500)
    plt.tight_layout()
    fig.savefig(
        f"{output_directory}/halo{cluster_id}_{redshift}_densitymap_type{particle_type}.png"
    )
    plt.close(fig)
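
`mass_map_units` is computed above but never attached to the map; one way to carry the units along with unyt (a sketch, mirroring the unyt_array pattern in the next example):

from unyt import unyt_array

# e.g. at the end of density_map, after building mass_map:
mass_map_with_units = unyt_array(mass_map.filled(0.0), mass_map_units)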
Example #10
def project(
    data, image_attributes: ImageAttributes, galaxy_attributes: GalaxyAttributes
):
    """
    Creates a projection image of `data` given attributes.
    """

    particle_data = getattr(data, image_attributes.particle_type)

    try:
        particle_data.smoothing_lengths
    except AttributeError:
        # Need to generate smoothing lengths
        particle_data.smoothing_lengths = generate_smoothing_lengths(
            coordinates=particle_data.coordinates,
            boxsize=data.metadata.boxsize,
            kernel_gamma=kernel_gamma,
            neighbours=57,
            speedup_fac=2,
            dimension=3,
        )

    # Set up extra plot parameters
    radius_distance = galaxy_attributes.radius * image_attributes.number_of_radii
    region = [
        galaxy_attributes.center[0] - radius_distance,
        galaxy_attributes.center[0] + radius_distance,
        galaxy_attributes.center[1] - radius_distance,
        galaxy_attributes.center[1] + radius_distance,
    ]
    rotation_matrix, rotation_center = get_rotation(image_attributes, galaxy_attributes)

    common_attributes = dict(
        data=particle_data,
        boxsize=data.metadata.boxsize,
        resolution=image_attributes.resolution,
        region=region,
        mask=None,
        rotation_matrix=rotation_matrix,
        rotation_center=rotation_center,
        parallel=False,
    )

    # Swiftsimio has a nasty habit of reading these in as grams.
    particle_data.masses.convert_to_units("1e10 * Solar_Mass")
    # Have we already made the mass image?
    image_mass_hash = hash(
        f"{image_attributes.resolution}"
        f"{image_attributes.particle_type}"
        f"{image_attributes.projection}"
        f"{galaxy_attributes.unique_id}"
    )

    try:
        mass_image = image_cache[image_mass_hash]
    except KeyError:
        mass_image = project_pixel_grid(project="masses", **common_attributes)
        image_cache[image_mass_hash] = mass_image

    # Special case for mass density projection
    if image_attributes.visualise == "projected_density":
        # Units are more complex here as this is a smoothed density.
        x_range = region[1] - region[0]
        y_range = region[3] - region[2]
        units = 1.0 / (x_range * y_range)
        # Unfortunately this is required to prevent us from {over,under}flowing
        # the units...
        units.convert_to_units(1.0 / (x_range.units * y_range.units))
        units *= particle_data.masses.units

        image = unyt_array(mass_image, units=units)
    else:
        # Set mass-weighted attribute

        setattr(
            particle_data,
            f"_CACHE_MASSWEIGHTED",
            getattr(particle_data, image_attributes.visualise)
            * particle_data.masses.value,
        )

        with np.testing.suppress_warnings() as sup:
            sup.filter(RuntimeWarning)
            image = unyt_array(
                project_pixel_grid(project="_CACHE_MASSWEIGHTED", **common_attributes)
                / mass_image,
                units=getattr(particle_data, image_attributes.visualise).units,
            )

    return image
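
A worked check of the projected-density unit algebra used above (the values are illustrative; only the units matter):

from unyt import unyt_array

m = unyt_array([1.0], "Msun")
dx = unyt_array([1.0], "Mpc")
sigma_units = m.units / (dx.units * dx.units)  # Msun / Mpc**2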
Example #11
def visualise_halo(
    output_path: Path,
    snapshot_path: Path,
    config: ImageConfig,
    halo: Halo,
):
    """
    Creates all of the visualisations in the config for the
    specified halo, and saves them to disk.

    Parameters
    ----------

    output_path: Path, str
        Output path to save images to. Inside this path, there will be
        a number of directories created (one per halo). This path must
        already exist.

    snapshot_path: Path
        Path to the snapshot. For a sufficiently large volume, and
        a sufficiently small number of haloes, there will be little
        to no overlap in the read regions.

    config: ImageConfig
        Opened configuration file.

    halo: Halo
        The halo to read the data for and visualise.
    """

    # First need to find the maximum radius, amongst any
    # of the images.
    radii = [
        image.get_radius(stellar_half_mass=halo.radius_100_kpc_star,
                         r_200_crit=halo.radius_200_crit)
        for image in config.images
    ]

    max_radius = max(radii)

    def extent_given_r(r):
        return [[halo.position[x] - r, halo.position[x] + r]
                for x in range(3)]

    halo_mask = mask(filename=snapshot_path, spatial_only=True)
    halo_mask.constrain_spatial(restrict=extent_given_r(max_radius))

    data = load(filename=snapshot_path, mask=halo_mask)

    # Generate the smoothing lengths if required.
    if config.calculate_dark_matter_smoothing_lengths:
        data.dark_matter.smoothing_lengths = generate_smoothing_lengths(
            coordinates=data.dark_matter.coordinates,
            boxsize=data.metadata.boxsize,
            kernel_gamma=kernel_gamma,
        )

    if config.recalculate_stellar_smoothing_lengths and hasattr(data, "stars"):
        if len(data.stars.coordinates) > 0:
            data.stars.smoothing_lengths = generate_smoothing_lengths(
                coordinates=data.stars.coordinates,
                boxsize=data.metadata.boxsize,
                kernel_gamma=kernel_gamma,
            )

    halo_directory = output_path / f"halo_{halo.unique_id}"
    halo_directory.mkdir(exist_ok=True)

    for image in config.images:
        # Which projections should we make?
        projections = [Projection.DEFAULT]

        if image.face_on:
            projections.append(Projection.FACE_ON)

        if image.edge_on:
            projections.append(Projection.EDGE_ON)

        for projection in projections:
            scatter = create_scatter(
                snapshot=data,
                halo=halo,
                image=image,
                projection=projection,
                resolution=config.resolution,
            )

            save_figure_from_scatter(
                scatter=scatter,
                config=config,
                halo=halo,
                image=image,
                projection=projection,
                output_path=halo_directory,
            )

            if (projection == Projection.DEFAULT
                    and image.base_name == config.thumbnail_image):
                save_thumbnail_from_scatter(
                    scatter=scatter,
                    config=config,
                    halo=halo,
                    image=image,
                    projection=projection,
                    output_path=halo_directory,
                )
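
A hypothetical driver for `visualise_halo` (the catalogue, config and paths are assumptions, not from the source):

from pathlib import Path

for halo in halo_catalogue:  # assumed iterable of Halo objects
    visualise_halo(
        output_path=Path("./images"),  # must already exist
        snapshot_path=Path("snap_0037.hdf5"),
        config=config,  # an opened ImageConfig
        halo=halo,
    )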
Example #12
    u_l = 3.085678e21  # 1 kpc in cm
    u_m = 1.989e43  # 1e10 Msun in g
    u_v = 1e5  # 1 km/s in cm/s
    u_t = u_l / u_v  # derived time unit in seconds
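    # Worked check: u_t = 3.085678e21 / 1e5 = 3.085678e16 s, roughly 0.98 Gyr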
    f["Units"].attrs["Unit length in cgs (U_L)"] = u_l
    f["Units"].attrs["Unit mass in cgs (U_M)"] = u_m
    f["Units"].attrs["Unit time in cgs (U_t)"] = u_t
    f["Units"].attrs["Unit current in cgs (U_I)"] = 1.0
    f["Units"].attrs["Unit temperature in cgs (U_T)"] = 1.0

    # Create the mass arrays
    for i in range(len(npart)):
        if npart[i] == 0:
            continue

        grp = f["PartType%i" % i]
        if "Masses" in grp:
            continue
        masses = np.ones(npart[i]) * mass[i]
        grp.create_dataset("Masses", data=masses, dtype="f")

    # Create the smoothing lengths
    pos = f["PartType0/Coordinates"][:] * kpc
    h = generate_smoothing_lengths(pos, box * kpc, kernel_gamma=1.825742)
    f["PartType0"].create_dataset("SmoothingLength", data=h.value, dtype="f")

    # Deal with the internal energy
    # The internal energy is set through the parameter file, so fill the
    # dataset with placeholder values
    u = np.ones(h.shape) * -1
    f["PartType0"].create_dataset("InternalEnergy", data=u, dtype="f")
Example #13
import numpy
import unyt

from swiftsimio import load
from swiftsimio.visualisation.projection import scatter_parallel
import project
from matplotlib.colors import LogNorm
from swiftsimio.visualisation.smoothing_length_generation import generate_smoothing_lengths

spread = unyt.unyt_array(numpy.loadtxt('/cosma5/data/durham/dc-murr1/dm_spread.txt'), units='Mpc')
halo_mass = unyt.unyt_array(numpy.loadtxt('/cosma5/data/durham/dc-murr1/dm_nearest_halo_mass.txt'), units='msun')
dm_halos = numpy.loadtxt('/cosma5/data/durham/dc-murr1/dm_neighbour_halos.txt')
dm_halos = dm_halos.astype(int)
snap = load('/cosma6/data/dp004/dc-borr1/swift-test-data/eagle_0037.hdf5')
coords = snap.dark_matter.coordinates
masses = snap.dark_matter.masses
smooth = generate_smoothing_lengths(
    coords,
    snap.metadata.boxsize,
    kernel_gamma=1.8,
    neighbours=57,
    speedup_fac=2,
    dimension=3,
)

mask = numpy.where(halo_mass > 0)[0]
spread = spread[mask]
halo_mass = halo_mass[mask]
dm_halos = dm_halos[mask]
coords = coords[mask]
masses = masses[mask]
smooth = smooth[mask]

where = numpy.where(dm_halos >= 0)[0]
spread = spread[where]
halo_mass = halo_mass[where]
Example #14
    def process_single_halo(
            self,
            zoom_obj: Zoom = None,
            path_to_snap: str = None,
            path_to_catalogue: str = None,
            mask_radius_r500: float = 6,
            map_centre: Union[str, list,
                              np.ndarray] = 'vr_centre_of_potential',
            depth: Optional[float] = None,
            return_type: Union[type, str] = 'class'):
        sw_data, vr_data = self.get_handles_from_zoom(
            zoom_obj,
            path_to_snap,
            path_to_catalogue,
            mask_radius_r500=mask_radius_r500,
        )

        map_centres_allowed = ['vr_centre_of_potential']

        if isinstance(map_centre, str) and map_centre.lower() not in map_centres_allowed:
            raise AttributeError(
                (f"String-commands for `map_centre` only support "
                 f"`vr_centre_of_potential`. Got {map_centre} instead."))
        elif isinstance(map_centre, (list, np.ndarray)) and len(map_centre) != 3:
            raise AttributeError(
                (f"List-commands for `map_centre` only support "
                 f"length-3 lists. Got {map_centre} "
                 f"(length {len(map_centre)}) instead."))

        self.map_centre = map_centre

        centre_of_potential = [
            vr_data.positions.xcminpot[0].to('Mpc') / vr_data.a,
            vr_data.positions.ycminpot[0].to('Mpc') / vr_data.a,
            vr_data.positions.zcminpot[0].to('Mpc') / vr_data.a
        ]

        if self.map_centre == 'vr_centre_of_potential':
            _xCen = vr_data.positions.xcminpot[0].to('Mpc') / vr_data.a
            _yCen = vr_data.positions.ycminpot[0].to('Mpc') / vr_data.a
            _zCen = vr_data.positions.zcminpot[0].to('Mpc') / vr_data.a

        elif isinstance(self.map_centre, (list, np.ndarray)):
            _xCen = self.map_centre[0] * Mpc / vr_data.a
            _yCen = self.map_centre[1] * Mpc / vr_data.a
            _zCen = self.map_centre[2] * Mpc / vr_data.a

        if xlargs.debug:
            print(
                f"Centre of potential: {[float(f'{i.v:.3f}') for i in centre_of_potential]} Mpc"
            )
            print(
                f"Map centre: {[float(f'{i.v:.3f}') for i in [_xCen, _yCen, _zCen]]} Mpc"
            )

        _r500 = vr_data.spherical_overdensities.r_500_rhocrit[0].to(
            'Mpc') / vr_data.a

        region = [
            _xCen - mask_radius_r500 / np.sqrt(3) * _r500,
            _xCen + mask_radius_r500 / np.sqrt(3) * _r500,
            _yCen - mask_radius_r500 / np.sqrt(3) * _r500,
            _yCen + mask_radius_r500 / np.sqrt(3) * _r500
        ]

        if not hasattr(sw_data.dark_matter, 'smoothing_lengths'):
            print('Generate smoothing lengths for the dark matter')
            sw_data.dark_matter.smoothing_lengths = generate_smoothing_lengths(
                sw_data.dark_matter.coordinates,
                sw_data.metadata.boxsize,
                kernel_gamma=kernel_gamma * 0.7,
                neighbours=57,
                speedup_fac=1,
                dimension=3,
            )

        if depth is not None:

            depth = min(depth * Mpc, mask_radius_r500 * np.sqrt(3) * _r500)

            depth_filter = np.where(
                (sw_data.dark_matter.coordinates[:, -1] > _zCen - depth / 2)
                & (sw_data.dark_matter.coordinates[:, -1] < _zCen + depth / 2))[0]

            if xlargs.debug:
                percent = f"{len(depth_filter) / len(sw_data.dark_matter.coordinates) * 100:.1f}"
                print((
                    f"Filtering particles by depth: +/- {depth:.2f}/2  Mpc.\n"
                    f"Total particles: {len(sw_data.dark_matter.coordinates)}\n"
                    f"Particles within bounds: {len(depth_filter)} = {percent} %"
                ))

            sw_data.dark_matter.coordinates = sw_data.dark_matter.coordinates[
                depth_filter]
            sw_data.dark_matter.smoothing_lengths = sw_data.dark_matter.smoothing_lengths[
                depth_filter]
            sw_data.dark_matter.masses = sw_data.dark_matter.masses[
                depth_filter]

        # Note here that we pass in the dark matter dataset not the whole
        # data object, to specify what particle type we wish to visualise
        dm_map = project_pixel_grid(project="masses",
                                    data=sw_data.dark_matter,
                                    resolution=self.resolution,
                                    parallel=self.parallel,
                                    region=region,
                                    backend=self.backend,
                                    boxsize=sw_data.metadata.boxsize)

        dm_map = np.ma.array(dm_map,
                             mask=(dm_map <= 0.),
                             fill_value=np.nan,
                             copy=True,
                             dtype=np.float64)

        output_values = [
            dm_map, region, dimensionless / Mpc**2, [_xCen, _yCen, _zCen],
            _r500, sw_data.metadata.z
        ]
        output_names = ['map', 'region', 'units', 'centre', 'r500', 'z']
        if return_type is tuple:
            output = tuple(output_values)
        elif return_type is dict:
            output = dict(zip(output_names, output_values))
        elif return_type == 'class':
            OutputClass = namedtuple('OutputClass', output_names)
            output = OutputClass(*output_values)
        else:
            raise TypeError(f"Return type {return_type} not recognised.")

        return output
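
A hypothetical call showing the return-type dispatch (the instance and zoom object are assumptions):

out = mapper.process_single_halo(zoom_obj=my_zoom, return_type='class')
# out.map is the masked mass map; out.region, out.centre, out.r500 and
# out.z provide the plotting context
print(out.r500, out.z)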