Example 1
0
    def _get_particle_type_counts(self):
        """Reads the active number of particles for every species.

        Returns
        -------
        dict
            keys are ptypes
            values are integer counts of the ptype
        """
        result = {}
        f = self.dataset._handle
        bp = self.dataset.base_path
        pp = self.dataset.particles_path

        try:
            for ptype in self.ds.particle_types_raw:
                if str(ptype) == "io":
                    # "io" is yt's fallback name for a single, unnamed species;
                    # resolve it to the first group under the particles path
                    spec = list(f[bp + pp].keys())[0]
                else:
                    spec = ptype
                # Any single position axis suffices - all axes have equal length
                axis = list(f[bp + pp + "/" + spec + "/position"].keys())[0]
                pos = f[bp + pp + "/" + spec + "/position/" + axis]
                if is_const_component(pos):
                    # Constant components store no data; the count is in "shape"
                    result[ptype] = pos.attrs["shape"]
                else:
                    result[ptype] = pos.len()
        except KeyError:
            # No particle data on disk for this output
            result["io"] = 0

        return result
Example 2
0
    def _fill_cache(self, ptype, index=0, offset=None):
        """Fills the particle position cache for the ``ptype``.

        Parameters
        ----------
        ptype : str
            The on-disk name of the particle species
        index : int, optional
            Starting index into the on-disk records
        offset : int, optional
            Number of particles to read; defaults to the full record length
        """
        if str((ptype, index, offset)) not in self._cached_ptype:
            self._cached_ptype = str((ptype, index, offset))
            pds = self._handle[self.base_path + self.particles_path + "/" + ptype]
            axes = list(pds["position"].keys())
            if offset is None:
                if is_const_component(pds["position/" + axes[0]]):
                    # Constant components carry their length in the "shape" attribute
                    offset = pds["position/" + axes[0]].attrs["shape"]
                else:
                    offset = pds["position/" + axes[0]].len()
            self.cache = np.empty((3, offset), dtype=np.float64)
            for i, ax in enumerate("xyz"):
                if ax in axes:
                    # openPMD splits positions into in-cell position and cell
                    # offset; sum them directly into the cache row via np.add's
                    # `out` argument to avoid a temporary
                    np.add(
                        get_component(pds, "position/" + ax, index, offset),
                        get_component(pds, "positionOffset/" + ax, index, offset),
                        self.cache[i],
                    )
                else:
                    # Pad accordingly with zeros to make 1D/2D datasets compatible
                    # These have to be the same shape as the existing axes since that
                    # equals the number of particles
                    self.cache[i] = np.zeros(offset)
Example 3
0
    def _count_grids(self):
        """Sets ``self.num_grids`` to be the total number of grids in the simulation.

        The number of grids is determined by their respective memory footprint.
        """
        f = self.dataset._handle
        bp = self.dataset.base_path
        mp = self.dataset.meshes_path
        pp = self.dataset.particles_path

        self.meshshapes = {}
        self.numparts = {}

        self.num_grids = 0

        try:
            meshes = f[bp + mp]
            for mname in meshes.keys():
                mesh = meshes[mname]
                if isinstance(mesh, h5.Group):
                    # Vector record: all components share one shape; use the first
                    shape = mesh[list(mesh.keys())[0]].shape
                else:
                    shape = mesh.shape
                spacing = tuple(mesh.attrs["gridSpacing"])
                offset = tuple(mesh.attrs["gridGlobalOffset"])
                unit_si = mesh.attrs["gridUnitSI"]
                self.meshshapes[mname] = (shape, spacing, offset, unit_si)
        except (KeyError, TypeError, AttributeError):
            pass
        try:
            particles = f[bp + pp]
            for pname in particles.keys():
                species = particles[pname]
                # Paths below must be RELATIVE to the species group; a leading
                # "/" makes h5py resolve from the file root, raising a KeyError
                # that the surrounding except would silently swallow
                if "particlePatches" in species.keys():
                    for patch, size in enumerate(
                        species["particlePatches/numParticles"]
                    ):
                        self.numparts[pname + "#" + str(patch)] = size
                else:
                    axis = list(species["position"].keys())[0]
                    if is_const_component(species["position/" + axis]):
                        # Constant component: particle count lives in "shape"
                        self.numparts[pname] = species["position/" + axis].attrs[
                            "shape"
                        ]
                    else:
                        self.numparts[pname] = species["position/" + axis].len()
        except (KeyError, TypeError, AttributeError):
            pass

        # Limit values per grid by resulting memory footprint
        self.vpg = int(self.dataset.gridsize / 4)  # 4Byte per value (f32)

        # Meshes of the same size do not need separate chunks
        for shape, *_ in set(self.meshshapes.values()):
            self.num_grids += min(
                shape[0], int(np.ceil(reduce(mul, shape) * self.vpg ** -1))
            )

        # Same goes for particle chunks if they are not inside particlePatches
        patches = {}
        no_patches = {}
        for k, v in self.numparts.items():
            if "#" in k:
                patches[k] = v
            else:
                no_patches[k] = v
        for size in set(no_patches.values()):
            self.num_grids += int(np.ceil(size * self.vpg ** -1))
        for size in patches.values():
            self.num_grids += int(np.ceil(size * self.vpg ** -1))
Example 4
0
    def _detect_output_fields(self):
        """Populates ``self.field_list`` with native fields (mesh and particle) on disk.

        Each entry is a tuple of two strings. The first element is the on-disk fluid type or particle type.
        The second element is the name of the field in yt. This string is later used for accessing the data.
        Convention suggests that the on-disk fluid type should be "openPMD",
        the on-disk particle type (for a single species of particles) is "io"
        or (for multiple species of particles) the particle name on-disk.
        """
        f = self.dataset._handle
        bp = self.dataset.base_path
        mp = self.dataset.meshes_path
        pp = self.dataset.particles_path

        mesh_fields = []
        try:
            meshes = f[bp + mp]
            for mname in meshes.keys():
                try:
                    mesh = meshes[mname]
                    # One field per axis of a vector mesh record
                    for axis in mesh.keys():
                        mesh_fields.append(mname.replace("_", "-") + "_" + axis)
                except AttributeError:
                    # This is a h5.Dataset (i.e. no axes)
                    mesh_fields.append(mname.replace("_", "-"))
        except (KeyError, TypeError, AttributeError):
            pass
        self.field_list = [("openPMD", str(field)) for field in mesh_fields]

        particle_fields = []
        try:
            particles = f[bp + pp]
            for pname in particles.keys():
                species = particles[pname]
                for recname in species.keys():
                    record = species[recname]
                    if is_const_component(record):
                        # Record itself (e.g. particle_mass) is constant
                        particle_fields.append(
                            pname.replace("_", "-") + "_" + recname.replace("_", "-")
                        )
                    elif "particlePatches" not in recname:
                        try:
                            # Create a field for every axis (x,y,z) of every property (position)
                            # of every species (electrons)
                            axes = list(record.keys())
                            if str(recname) == "position":
                                # particle_position is derived later; expose the
                                # raw record under a distinct name
                                recname = "positionCoarse"
                            for axis in axes:
                                particle_fields.append(
                                    pname.replace("_", "-")
                                    + "_"
                                    + recname.replace("_", "-")
                                    + "_"
                                    + axis
                                )
                        except AttributeError:
                            # Record is a dataset, does not have axes (e.g. weighting)
                            particle_fields.append(
                                pname.replace("_", "-")
                                + "_"
                                + recname.replace("_", "-")
                            )
                    # particlePatches records are chunking metadata, not fields
            if len(list(particles.keys())) > 1:
                # There is more than one particle species, use the specific names as field types
                self.field_list.extend(
                    [
                        (
                            str(field).split("_")[0],
                            ("particle_" + "_".join(str(field).split("_")[1:])),
                        )
                        for field in particle_fields
                    ]
                )
            else:
                # Only one particle species, fall back to "io"
                self.field_list.extend(
                    [
                        ("io", ("particle_" + "_".join(str(field).split("_")[1:])))
                        for field in particle_fields
                    ]
                )
        except (KeyError, TypeError, AttributeError):
            pass
Example 5
0
    def __init__(self, ds, field_list):
        """Builds ``known_other_fields``/``known_particle_fields`` from the on-disk records.

        Parameters
        ----------
        ds : Dataset
            openPMD dataset whose HDF5 handle is inspected
        field_list : list
            Passed through unchanged to the FieldInfoContainer base class
        """
        f = ds._handle
        bp = ds.base_path
        mp = ds.meshes_path
        pp = ds.particles_path

        try:
            fields = f[bp + mp]
            for fname in fields.keys():
                field = fields[fname]
                # unitDimension is a record-level attribute, identical for all
                # axes: parse it once per field. np.int was removed in
                # NumPy 1.20; use the explicit np.int64 instead.
                parsed = parse_unit_dimension(
                    np.asarray(field.attrs["unitDimension"], dtype=np.int64)
                )
                unit = str(YTQuantity(1, parsed).units)
                if isinstance(field, h5.Dataset) or is_const_component(field):
                    # Don't consider axes. This appears to be a vector field of single dimensionality
                    ytnames = ["_".join([fname.replace("_", "-")])]
                else:
                    ytnames = [
                        "_".join([fname.replace("_", "-"), axis])
                        for axis in field.keys()
                    ]
                for ytname in ytnames:
                    # Save a list of magnetic fields for aliasing later on
                    # We can not reasonably infer field type/unit by name in openPMD
                    if unit == "T" or unit == "kg/(A*s**2)":
                        self._mag_fields.append(ytname)
                    self.known_other_fields += ((ytname, (unit, [], None)),)
            for i in self.known_other_fields:
                mylog.debug("open_pmd - known_other_fields - %s", i)
        except (KeyError, TypeError, AttributeError):
            pass

        try:
            particles = f[bp + pp]
            for pname in particles.keys():
                species = particles[pname]
                for recname in species.keys():
                    try:
                        record = species[recname]
                        parsed = parse_unit_dimension(record.attrs["unitDimension"])
                        unit = str(YTQuantity(1, parsed).units)
                        ytattrib = str(recname).replace("_", "-")
                        if ytattrib == "position":
                            # Symbolically rename position to preserve yt's interpretation of the pfield
                            # particle_position is later derived in setup_absolute_positions in the way yt expects it
                            ytattrib = "positionCoarse"
                        if isinstance(record, h5.Dataset) or is_const_component(record):
                            self.known_particle_fields += (
                                ("_".join(["particle", ytattrib]), (unit, [], None)),
                            )
                        else:
                            for axis in record.keys():
                                self.known_particle_fields += (
                                    (
                                        "_".join(["particle", ytattrib, axis]),
                                        (unit, [], None),
                                    ),
                                )
                    except KeyError:
                        # Only particlePatches is expected to lack unitDimension
                        if recname != "particlePatches":
                            mylog.info(
                                "open_pmd - %s_%s does not seem to have unitDimension",
                                pname,
                                recname,
                            )
            for i in self.known_particle_fields:
                mylog.debug("open_pmd - known_particle_fields - %s", i)
        except (KeyError, TypeError, AttributeError):
            pass

        super(OpenPMDFieldInfo, self).__init__(ds, field_list)