def _vorticity_y(field, data):
    f  = (data[ftype, "velocity_x"][sl_center, sl_center, sl_right] -
          data[ftype, "velocity_x"][sl_center, sl_center, sl_left]) \
          / (div_fac * just_one(data["index", "dz"]))
    f -= (data[ftype, "velocity_z"][sl_right, sl_center, sl_center] -
          data[ftype, "velocity_z"][sl_left, sl_center, sl_center]) \
          / (div_fac * just_one(data["index", "dx"]))
    new_field = data.ds.arr(np.zeros_like(data[ftype, "velocity_z"],
                                          dtype=np.float64),
                            f.units)
    new_field[sl_center, sl_center, sl_center] = f
    return new_field

def _divergence(field, data):
    ds = div_fac * just_one(data["index", "dx"])
    f  = data[xn][sl_right, 1:-1, 1:-1] / ds
    f -= data[xn][sl_left, 1:-1, 1:-1] / ds
    ds = div_fac * just_one(data["index", "dy"])
    f += data[yn][1:-1, sl_right, 1:-1] / ds
    f -= data[yn][1:-1, sl_left, 1:-1] / ds
    ds = div_fac * just_one(data["index", "dz"])
    f += data[zn][1:-1, 1:-1, sl_right] / ds
    f -= data[zn][1:-1, 1:-1, sl_left] / ds
    new_field = data.ds.arr(np.zeros(data[xn].shape, dtype=np.float64),
                            f.units)
    new_field[1:-1, 1:-1, 1:-1] = f
    return new_field
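The two stencils above rely on a few module-level names that do not appear in the snippet itself. The definitions below follow the convention usually found in yt's vector_operations field module, restated here as an assumption for readability rather than as part of the original page: just_one collapses an array of cell widths (dx, dy, dz) to a single scalar quantity, and the slice objects pick out shifted views of the grid for central differences.

# Assumed context for the stencils above (illustrative, not quoted from yt):
sl_left = slice(None, -2, None)    # cells at i-1 relative to the interior
sl_center = slice(1, -1, None)     # interior cells
sl_right = slice(2, None, None)    # cells at i+1 relative to the interior
div_fac = 2.0                      # central differences span two cell widths
# ftype, xn, yn, zn name the field type and the three vector components,
# e.g. ftype = "gas" and xn = (ftype, "velocity_x").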
Example 3
    def _store_fields(self, storage_object, field_data, **kwargs):
        root_only = kwargs.get("root_only", True)

        if not field_data: return
        root_field_data = dict([(field, just_one(field_data[field]))
                                for field in field_data])
        if not root_only:
            storage_object._tree_field_data.update(field_data)
        storage_object._root_field_data.update(root_field_data)
Example 4
def test_just_one():
    # Check that behaviour of this function is consistent before and after refactor
    # PR 2893
    for unit in ["mm", "cm", "km", "pc", "g", "kg", "M_sun"]:
        obj = YTArray([0.0, 1.0], unit)
        expected = YTQuantity(obj.flat[0],
                              obj.units,
                              registry=obj.units.registry)
        jo = just_one(obj)
        assert jo == expected
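For reference, the behaviour this test pins down can be reproduced in a few lines. The function below is a minimal stand-in written purely from the test's expectations; it is not yt's actual implementation of just_one.

from yt.units.yt_array import YTArray, YTQuantity

def just_one_sketch(obj):
    # Collapse an array-like with units to a single scalar quantity,
    # keeping the unit registry, exactly as the assertion above expects.
    return YTQuantity(obj.flat[0], obj.units, registry=obj.units.registry)

print(just_one_sketch(YTArray([0.0, 1.0], "km")))  # 0.0 km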
    def _set_code_unit_attributes(self):
        """
        Generates the conversion to various physical units
        based on the parameter file
        """

        # This should be improved.
        h5f = h5py.File(self.parameter_filename, "r")
        for field_name in h5f["/field_types"]:
            current_field = h5f["/field_types/%s" % field_name]
            if 'field_to_cgs' in current_field.attrs:
                field_conv = current_field.attrs['field_to_cgs']
                self.field_units[field_name] = just_one(field_conv)
            elif 'field_units' in current_field.attrs:
                field_units = current_field.attrs['field_units']
                if isinstance(field_units, str):
                    current_field_units = current_field.attrs['field_units']
                else:
                    current_field_units = \
                        just_one(current_field.attrs['field_units'])
                self.field_units[field_name] = current_field_units
            else:
                self.field_units[field_name] = ""

        if "dataset_units" in h5f:
            for unit_name in h5f["/dataset_units"]:
                current_unit = h5f["/dataset_units/%s" % unit_name]
                value = current_unit.value
                unit = current_unit.attrs["unit"]
                setattr(self, unit_name, self.quan(value, unit))
                if unit_name in h5f["/field_types"]:
                    if unit_name in self.field_units:
                        mylog.warning(
                            "'field_units' was overridden by 'dataset_units/%s'"
                            % (unit_name))
                    self.field_units[unit_name] = unit
        else:
            self.length_unit = self.quan(1.0, "cm")
            self.mass_unit = self.quan(1.0, "g")
            self.time_unit = self.quan(1.0, "s")

        h5f.close()
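The reader above walks two HDF5 groups: per-field unit metadata under /field_types and dataset-wide units under /dataset_units. The sketch below writes a minimal file with that layout, inferred only from the code above; it is not an official GDF writer, and the field names are made up for illustration.

import h5py

with h5py.File("units_layout_sketch.h5", "w") as h5f:
    ft = h5f.create_group("field_types")
    ft.create_group("density").attrs["field_units"] = "g/cm**3"   # 'field_units' branch
    ft.create_group("velocity_x").attrs["field_to_cgs"] = 1.0e5   # 'field_to_cgs' branch
    du = h5f.create_group("dataset_units")
    length = du.create_dataset("length_unit", data=3.0857e18)
    length.attrs["unit"] = "cm"   # becomes ds.length_unit via self.quan(value, unit)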
    def _averaged_field(field, data):
        nx, ny, nz = data[(ftype, basename)].shape
        new_field = data.ds.arr(np.zeros((nx-2, ny-2, nz-2), dtype=np.float64),
                                (just_one(data[(ftype, basename)]) *
                                 just_one(data[(ftype, weight)])).units)
        weight_field = data.ds.arr(np.zeros((nx-2, ny-2, nz-2),
                                            dtype=np.float64),
                                   data[(ftype, weight)].units)
        i_i, j_i, k_i = np.mgrid[0:3, 0:3, 0:3]

        for i, j, k in zip(i_i.ravel(), j_i.ravel(), k_i.ravel()):
            sl = (slice(i, nx-(2-i)), slice(j, ny-(2-j)), slice(k, nz-(2-k)))
            new_field += data[(ftype, basename)][sl] * data[(ftype, weight)][sl]
            weight_field += data[(ftype, weight)][sl]

        # Now some fancy footwork
        new_field2 = data.ds.arr(np.zeros((nx, ny, nz)), 
                                 data[(ftype, basename)].units)
        new_field2[1:-1, 1:-1, 1:-1] = new_field / weight_field
        return new_field2
Example 9
    def _averaged_field(field, data):
        def atleast_4d(array):
            if array.ndim == 3:
                return array[..., None]
            else:
                return array

        nx, ny, nz, ngrids = atleast_4d(data[(ftype, basename)]).shape
        new_field = data.ds.arr(
            np.zeros((nx - 2, ny - 2, nz - 2, ngrids), dtype=np.float64),
            (just_one(data[(ftype, basename)]) * just_one(data[(ftype, weight)])).units,
        )
        weight_field = data.ds.arr(
            np.zeros((nx - 2, ny - 2, nz - 2, ngrids), dtype=np.float64),
            data[(ftype, weight)].units,
        )
        i_i, j_i, k_i = np.mgrid[0:3, 0:3, 0:3]

        for i, j, k in zip(i_i.ravel(), j_i.ravel(), k_i.ravel()):
            sl = (
                slice(i, nx - (2 - i)),
                slice(j, ny - (2 - j)),
                slice(k, nz - (2 - k)),
            )
            new_field += (
                atleast_4d(data[(ftype, basename)])[sl]
                * atleast_4d(data[(ftype, weight)])[sl]
            )
            weight_field += atleast_4d(data[(ftype, weight)])[sl]

        # Now some fancy footwork
        new_field2 = data.ds.arr(
            np.zeros((nx, ny, nz, ngrids)), data[(ftype, basename)].units
        )
        new_field2[1:-1, 1:-1, 1:-1] = new_field / weight_field

        if data[(ftype, basename)].ndim == 3:
            return new_field2[..., 0]
        else:
            return new_field2
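The atleast_4d helper is what lets the same stencil serve both a single (nx, ny, nz) block and a stack of grids with a trailing axis. A standalone shape check of that padding, for illustration only:

import numpy as np

def atleast_4d(array):
    # Pad a trailing axis so 3D and 4D inputs share one code path.
    return array[..., None] if array.ndim == 3 else array

print(atleast_4d(np.zeros((8, 8, 8))).shape)     # (8, 8, 8, 1)
print(atleast_4d(np.zeros((8, 8, 8, 5))).shape)  # (8, 8, 8, 5)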
Example 10
    def _shear(field, data):
        """
        Shear is defined as [(dvx/dy + dvy/dx)^2 + (dvz/dy + dvy/dz)^2 +
                             (dvx/dz + dvz/dx)^2 ]^(0.5)
        where dvx/dy = [vx(j-1) - vx(j+1)]/[2dy]
        and is in units of s^(-1)
        (it's just like vorticity except add the derivative pairs instead
         of subtracting them)
        """

        if data.ds.geometry != "cartesian":
            raise NotImplementedError(
                "shear is only supported in cartesian geometries")

        try:
            vx = data[ftype, "relative_velocity_x"]
            vy = data[ftype, "relative_velocity_y"]
        except YTFieldNotFound as e:
            raise YTDimensionalityError(
                "shear computation requires 2 velocity components") from e

        dvydx = (vy[sl_right, sl_center, sl_center] -
                 vy[sl_left, sl_center, sl_center]) / (
                     div_fac * just_one(data["index", "dx"]))
        dvxdy = (vx[sl_center, sl_right, sl_center] -
                 vx[sl_center, sl_left, sl_center]) / (
                     div_fac * just_one(data["index", "dy"]))
        f = (dvydx + dvxdy)**2.0
        del dvydx, dvxdy

        try:
            vz = data[ftype, "relative_velocity_z"]
            dvzdy = (vz[sl_center, sl_right, sl_center] -
                     vz[sl_center, sl_left, sl_center]) / (
                         div_fac * just_one(data["index", "dy"]))
            dvydz = (vy[sl_center, sl_center, sl_right] -
                     vy[sl_center, sl_center, sl_left]) / (
                         div_fac * just_one(data["index", "dz"]))
            f += (dvzdy + dvydz)**2.0
            del dvzdy, dvydz
            dvxdz = (vx[sl_center, sl_center, sl_right] -
                     vx[sl_center, sl_center, sl_left]) / (
                         div_fac * just_one(data["index", "dz"]))
            dvzdx = (vz[sl_right, sl_center, sl_center] -
                     vz[sl_left, sl_center, sl_center]) / (
                         div_fac * just_one(data["index", "dx"]))
            f += (dvxdz + dvzdx)**2.0
            del dvxdz, dvzdx
        except YTFieldNotFound:
            # the absence of a z velocity component is not blocking
            pass

        np.sqrt(f, out=f)
        new_field = data.ds.arr(np.zeros_like(data[ftype, "velocity_x"]),
                                f.units)
        new_field[sl_center, sl_center, sl_center] = f
        return new_field
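A quick way to see what the stencil computes is to feed it a pure shear flow vx = a*y with vy = vz = 0, for which the docstring's expression reduces to |dvx/dy| = |a|. The check below reuses the same slice convention in plain numpy; it is an illustrative sanity check, not yt code, and it assumes equal cell widths so that dx = dy.

import numpy as np

sl_left, sl_center, sl_right = slice(None, -2), slice(1, -1), slice(2, None)
div_fac = 2.0
a, dy, n = 3.0, 0.1, 8                      # shear rate, cell width, grid size
vx = np.broadcast_to(a * dy * np.arange(n)[None, :, None], (n, n, n))
vy = np.zeros((n, n, n))

dvydx = (vy[sl_right, sl_center, sl_center] -
         vy[sl_left, sl_center, sl_center]) / (div_fac * dy)   # dx == dy here
dvxdy = (vx[sl_center, sl_right, sl_center] -
         vx[sl_center, sl_left, sl_center]) / (div_fac * dy)
shear = np.sqrt((dvydx + dvxdy) ** 2)
print(np.allclose(shear, a))  # True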
Example 11
 def _parse_parameter_file(self):
     self._handle = h5py.File(self.parameter_filename, mode="r")
     if "data_software" in self._handle["gridded_data_format"].attrs:
         self.data_software = self._handle["gridded_data_format"].attrs[
             "data_software"
         ]
     else:
         self.data_software = "unknown"
     sp = self._handle["/simulation_parameters"].attrs
     if self.geometry is None:
         geometry = just_one(sp.get("geometry", 0))
         try:
             self.geometry = GEOMETRY_TRANS[geometry]
         except KeyError as e:
             raise YTGDFUnknownGeometry(geometry) from e
     self.parameters.update(sp)
     self.domain_left_edge = sp["domain_left_edge"][:]
     self.domain_right_edge = sp["domain_right_edge"][:]
     self.domain_dimensions = sp["domain_dimensions"][:]
     refine_by = sp["refine_by"]
     if refine_by is None:
         refine_by = 2
     self.refine_by = refine_by
     self.dimensionality = sp["dimensionality"]
     self.current_time = sp["current_time"]
     self.unique_identifier = sp["unique_identifier"]
     self.cosmological_simulation = sp["cosmological_simulation"]
     if sp["num_ghost_zones"] != 0:
         raise RuntimeError
     self.num_ghost_zones = sp["num_ghost_zones"]
     self.field_ordering = sp["field_ordering"]
     self.boundary_conditions = sp["boundary_conditions"][:]
     self._periodicity = tuple(bnd == 0 for bnd in self.boundary_conditions[::2])
     if self.cosmological_simulation:
         self.current_redshift = sp["current_redshift"]
         self.omega_lambda = sp["omega_lambda"]
         self.omega_matter = sp["omega_matter"]
         self.hubble_constant = sp["hubble_constant"]
     else:
         self.current_redshift = 0.0
         self.omega_lambda = 0.0
         self.omega_matter = 0.0
         self.hubble_constant = 0.0
         self.cosmological_simulation = 0
     self.parameters["Time"] = 1.0  # Hardcode time conversion for now.
     # Hardcode for now until field staggering is supported.
     self.parameters["HydroMethod"] = 0
     self._handle.close()
     del self._handle
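The parameters read above all live as attributes on the /simulation_parameters group, plus an optional data_software attribute on /gridded_data_format. The sketch below writes a minimal file containing just those attributes; it is inferred from the code above rather than from the GDF specification, and the values are placeholders.

import h5py
import numpy as np

with h5py.File("gdf_params_sketch.h5", "w") as h5f:
    h5f.create_group("gridded_data_format").attrs["data_software"] = "example"
    sp = h5f.create_group("simulation_parameters").attrs
    sp["domain_left_edge"] = np.zeros(3)
    sp["domain_right_edge"] = np.ones(3)
    sp["domain_dimensions"] = np.array([16, 16, 16])
    sp["refine_by"] = 2
    sp["dimensionality"] = 3
    sp["current_time"] = 0.0
    sp["unique_identifier"] = "sketch"
    sp["cosmological_simulation"] = 0
    sp["num_ghost_zones"] = 0                      # must be 0 or the reader raises
    sp["field_ordering"] = 0
    sp["boundary_conditions"] = np.zeros(6, int)   # 0 -> periodic along that axis
    h5f.create_group("field_types")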
 def _parse_parameter_file(self):
     self._handle = h5py.File(self.parameter_filename, "r")
     if 'data_software' in self._handle['gridded_data_format'].attrs:
         self.data_software = \
             self._handle['gridded_data_format'].attrs['data_software']
     else:
         self.data_software = "unknown"
     sp = self._handle["/simulation_parameters"].attrs
     if self.geometry is None:
         geometry = just_one(sp.get("geometry", 0))
         try:
             self.geometry = GEOMETRY_TRANS[geometry]
         except KeyError:
             raise YTGDFUnknownGeometry(geometry)
     self.parameters.update(sp)
     self.domain_left_edge = sp["domain_left_edge"][:]
     self.domain_right_edge = sp["domain_right_edge"][:]
     self.domain_dimensions = sp["domain_dimensions"][:]
     refine_by = sp["refine_by"]
     if refine_by is None:
         refine_by = 2
     self.refine_by = refine_by
     self.dimensionality = sp["dimensionality"]
     self.current_time = sp["current_time"]
     self.unique_identifier = sp["unique_identifier"]
     self.cosmological_simulation = sp["cosmological_simulation"]
     if sp["num_ghost_zones"] != 0:
         raise RuntimeError
     self.num_ghost_zones = sp["num_ghost_zones"]
     self.field_ordering = sp["field_ordering"]
     self.boundary_conditions = sp["boundary_conditions"][:]
     p = [bnd == 0 for bnd in self.boundary_conditions[::2]]
     self.periodicity = ensure_tuple(p)
     if self.cosmological_simulation:
         self.current_redshift = sp["current_redshift"]
         self.omega_lambda = sp["omega_lambda"]
         self.omega_matter = sp["omega_matter"]
         self.hubble_constant = sp["hubble_constant"]
     else:
         self.current_redshift = self.omega_lambda = self.omega_matter = \
             self.hubble_constant = self.cosmological_simulation = 0.0
     self.parameters['Time'] = 1.0  # Hardcode time conversion for now.
     # Hardcode for now until field staggering is supported.
     self.parameters["HydroMethod"] = 0
     self._handle.close()
     del self._handle
Example 13
 def _shear(field, data):
     """
     Shear is defined as [(dvx/dy + dvy/dx)^2 + (dvz/dy + dvy/dz)^2 +
                          (dvx/dz + dvz/dx)^2 ]^(0.5)
     where dvx/dy = [vx(j-1) - vx(j+1)]/[2dy]
     and is in units of s^(-1)
     (it's just like vorticity except add the derivative pairs instead
      of subtracting them)
     """
     if data.ds.dimensionality > 1:
         vx = data[ftype, "relative_velocity_x"]
         vy = data[ftype, "relative_velocity_y"]
         dvydx = ((vy[sl_right, sl_center, sl_center] -
                   vy[sl_left, sl_center, sl_center]) /
                  (div_fac * just_one(data["index", "dx"])))
         dvxdy = ((vx[sl_center, sl_right, sl_center] -
                   vx[sl_center, sl_left, sl_center]) /
                  (div_fac * just_one(data["index", "dy"])))
         f = (dvydx + dvxdy)**2.0
         del dvydx, dvxdy
     if data.ds.dimensionality > 2:
         vz = data[ftype, "relative_velocity_z"]
         dvzdy = ((vz[sl_center, sl_right, sl_center] -
                   vz[sl_center, sl_left, sl_center]) /
                  (div_fac * just_one(data["index", "dy"])))
         dvydz = ((vy[sl_center, sl_center, sl_right] -
                   vy[sl_center, sl_center, sl_left]) /
                  (div_fac * just_one(data["index", "dz"])))
         f += (dvzdy + dvydz)**2.0
         del dvzdy, dvydz
         dvxdz = ((vx[sl_center, sl_center, sl_right] -
                   vx[sl_center, sl_center, sl_left]) /
                  (div_fac * just_one(data["index", "dz"])))
         dvzdx = ((vz[sl_right, sl_center, sl_center] -
                   vz[sl_left, sl_center, sl_center]) /
                  (div_fac * just_one(data["index", "dx"])))
         f += (dvxdz + dvzdx)**2.0
         del dvxdz, dvzdx
     np.sqrt(f, out=f)
     new_field = data.ds.arr(np.zeros_like(data[ftype, "velocity_x"]),
                             f.units)
     new_field[sl_center, sl_center, sl_center] = f
     return new_field
 def _shear(field, data):
     """
     Shear is defined as [(dvx/dy + dvy/dx)^2 + (dvz/dy + dvy/dz)^2 +
                          (dvx/dz + dvz/dx)^2 ]^(0.5)
     where dvx/dy = [vx(j-1) - vx(j+1)]/[2dy]
     and is in units of s^(-1)
     (it's just like vorticity except add the derivative pairs instead
      of subtracting them)
     """
     
     if data.ds.dimensionality > 1:
         dvydx = (data[ftype, "velocity_y"][sl_right,sl_center,sl_center] -
                 data[ftype, "velocity_y"][sl_left,sl_center,sl_center]) \
                 / (div_fac*just_one(data["index", "dx"]))
         dvxdy = (data[ftype, "velocity_x"][sl_center,sl_right,sl_center] -
                 data[ftype, "velocity_x"][sl_center,sl_left,sl_center]) \
                 / (div_fac*just_one(data["index", "dy"]))
         f  = (dvydx + dvxdy)**2.0
         del dvydx, dvxdy
     if data.ds.dimensionality > 2:
         dvzdy = (data[ftype, "velocity_z"][sl_center,sl_right,sl_center] -
                 data[ftype, "velocity_z"][sl_center,sl_left,sl_center]) \
                 / (div_fac*just_one(data["index", "dy"]))
         dvydz = (data[ftype, "velocity_y"][sl_center,sl_center,sl_right] -
                 data[ftype, "velocity_y"][sl_center,sl_center,sl_left]) \
                 / (div_fac*just_one(data["index", "dz"]))
         f += (dvzdy + dvydz)**2.0
         del dvzdy, dvydz
         dvxdz = (data[ftype, "velocity_x"][sl_center,sl_center,sl_right] -
                 data[ftype, "velocity_x"][sl_center,sl_center,sl_left]) \
                 / (div_fac*just_one(data["index", "dz"]))
         dvzdx = (data[ftype, "velocity_z"][sl_right,sl_center,sl_center] -
                 data[ftype, "velocity_z"][sl_left,sl_center,sl_center]) \
                 / (div_fac*just_one(data["index", "dx"]))
         f += (dvxdz + dvzdx)**2.0
         del dvxdz, dvzdx
     np.sqrt(f, out=f)
     new_field = data.ds.arr(np.zeros_like(data[ftype, "velocity_x"]), f.units)
     new_field[sl_center, sl_center, sl_center] = f
     return new_field