def test_base_equivalent():
    """
    Check base equivalent of a unit.

    """
    Msun_cgs = mass_sun_grams
    Mpc_cgs = cm_per_mpc

    u1 = Unit("Msun * Mpc**-3")
    u2 = Unit("g * cm**-3")
    u3 = u1.get_base_equivalent()

    yield assert_true, u2.expr == u3.expr
    yield assert_true, u2 == u3

    yield assert_allclose_units, u1.base_value, Msun_cgs / Mpc_cgs**3, 1e-12
    yield assert_true, u2.base_value == 1
    yield assert_true, u3.base_value == 1

    mass_density = mass / length**3

    yield assert_true, u1.dimensions == mass_density
    yield assert_true, u2.dimensions == mass_density
    yield assert_true, u3.dimensions == mass_density

    yield assert_allclose_units, get_conversion_factor(u1, u3)[0], \
        Msun_cgs / Mpc_cgs**3, 1e-12
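
A minimal usage sketch of the conversion exercised by the test above, assuming the same Unit class these snippets use (yt.units.unit_object.Unit in yt 3.x; in newer versions the class lives in unyt). Printed values are illustrative.

from yt.units.unit_object import Unit

u = Unit("Msun * Mpc**-3")
base = u.get_base_equivalent()   # the equivalent unit in the default (cgs) base system
print(base)                      # g/cm**3
print(u.dimensions)              # (mass)/(length)**3
print(u.base_value)              # conversion factor from Msun/Mpc**3 to g/cm**3
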
    def setup_particle_fields(self, ptype, ftype='gas', num_neighbors=64):
        skip_output_units = ("code_length",)
        for f, (units, aliases, dn) in sorted(self.known_particle_fields):
            units = self.ds.field_units.get((ptype, f), units)
            if (f in aliases or ptype not in self.ds.particle_types_raw) and \
                units not in skip_output_units:
                u = Unit(units, registry = self.ds.unit_registry)
                output_units = str(u.get_cgs_equivalent())
            else:
                output_units = units
            if (ptype, f) not in self.field_list:
                continue
            self.add_output_field((ptype, f),
                units = units, particle_type = True,
                display_name = dn, output_units = output_units)
            for alias in aliases:
                self.alias((ptype, alias), (ptype, f), units = output_units)

        # We'll either have particle_position or particle_position_[xyz]
        if (ptype, "particle_position") in self.field_list or \
           (ptype, "particle_position") in self.field_aliases:
            particle_scalar_functions(ptype,
                   "particle_position", "particle_velocity",
                   self)
        else:
            # We need to check to make sure that there's a "known field" that
            # overlaps with one of the vector fields.  For instance, if we are
            # in the Stream frontend, and we have a set of scalar position
            # fields, they will overlap with -- and be overridden by -- the
            # "known" vector field that the frontend creates.  So the easiest
            # thing to do is to simply remove the on-disk field (which doesn't
            # exist) and replace it with a derived field.
            if (ptype, "particle_position") in self and \
                 self[ptype, "particle_position"]._function == NullFunc:
                self.pop((ptype, "particle_position"))
            particle_vector_functions(ptype,
                    ["particle_position_%s" % ax for ax in 'xyz'],
                    ["particle_velocity_%s" % ax for ax in 'xyz'],
                    self)
        particle_deposition_functions(ptype, "particle_position",
            "particle_mass", self)
        standard_particle_fields(self, ptype)
        # Now we check for any leftover particle fields
        for field in sorted(self.field_list):
            if field in self: continue
            if not isinstance(field, tuple):
                raise RuntimeError
            if field[0] not in self.ds.particle_types:
                continue
            self.add_output_field(field, 
                                  units = self.ds.field_units.get(field, ""),
                                  particle_type = True)
        self.setup_smoothed_fields(ptype, 
                                   num_neighbors=num_neighbors,
                                   ftype=ftype)
    def alias(self, alias_name, original_name, units=None):
        if original_name not in self: return
        if units is None:
            # We default to CGS here, but in principle, this can be pluggable
            # as well.
            u = Unit(self[original_name].units,
                     registry=self.ds.unit_registry)
            units = str(u.get_cgs_equivalent())
        self.field_aliases[alias_name] = original_name
        self.add_field(alias_name,
                       function=TranslationFunc(original_name),
                       particle_type=self[original_name].particle_type,
                       display_name=self[original_name].display_name,
                       units=units)
Example No. 4
def test_string_representation():
    """
    Check unit string representation.

    """
    pc = Unit("pc")
    Myr = Unit("Myr")
    speed = pc / Myr
    dimensionless = Unit()

    assert_true(str(pc) == "pc")
    assert_true(str(Myr) == "Myr")
    assert_true(str(speed) == "pc/Myr")
    assert_true(repr(speed) == "pc/Myr")
    assert_true(str(dimensionless) == "dimensionless")
Example No. 5
def _sanitize_min_max_units(amin, amax, finfo, registry):
    # returns a copy of amin and amax, converted to finfo's output units
    umin = getattr(amin, 'units', None)
    umax = getattr(amax, 'units', None)
    if umin is None:
        umin = Unit(finfo.output_units, registry=registry)
        rmin = YTQuantity(amin, umin)
    else:
        rmin = amin.in_units(finfo.output_units)
    if umax is None:
        umax = Unit(finfo.output_units, registry=registry)
        rmax = YTQuantity(amax, umax)
    else:
        rmax = amax.in_units(finfo.output_units)
    return rmin, rmax
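
A small sketch of the duck-typing convention this helper relies on: bare numbers are treated as already being in the field's output units, while quantities carry units and get converted with in_units(). The quantity below is illustrative.

from yt.units.yt_array import YTQuantity

zmin = 1.0e-28                          # no .units attribute -> wrapped as-is
zmax = YTQuantity(1.0, "Msun/kpc**3")   # has units -> converted via in_units()
print(getattr(zmin, "units", None))     # None
print(zmax.units)                       # Msun/kpc**3
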
Example No. 6
def test_create_with_duplicate_dimensions():
    """
    Create units with overlapping dimensions. Ex: km/Mpc.

    """

    u1 = Unit("erg * s**-1")
    u2 = Unit("km/s/Mpc")
    km_cgs = cm_per_km
    Mpc_cgs = cm_per_mpc

    assert_true(u1.base_value == 1)
    assert_true(u1.dimensions == power)

    assert_allclose_units(u2.base_value, km_cgs / Mpc_cgs, 1e-12)
    assert_true(u2.dimensions == rate)
Example No. 7
def test_create_with_duplicate_dimensions():
    """
    Create units with overlapping dimensions. Ex: km/Mpc.

    """

    u1 = Unit("erg * s**-1")
    u2 = Unit("km/s/Mpc")
    km_cgs = cm_per_km
    Mpc_cgs = cm_per_mpc

    yield assert_true, u1.cgs_value == 1
    yield assert_true, u1.dimensions == power

    yield assert_allclose, u2.cgs_value, km_cgs / Mpc_cgs, 1e-12
    yield assert_true, u2.dimensions == rate
Example No. 8
def test_slice():
    fns = []
    grid_eps = np.finfo(np.float64).eps
    for nprocs in [8, 1]:
        # We want to test both 1 proc and 8 procs, to make sure that
        # parallelism isn't broken
        ds = fake_random_ds(64, nprocs=nprocs)
        dims = ds.domain_dimensions
        xn, yn, zn = ds.domain_dimensions
        dx = ds.arr(1.0 / (ds.domain_dimensions * 2), 'code_length')
        xi, yi, zi = ds.domain_left_edge + dx
        xf, yf, zf = ds.domain_right_edge - dx
        coords = np.mgrid[xi:xf:xn * 1j, yi:yf:yn * 1j, zi:zf:zn * 1j]
        uc = [np.unique(c) for c in coords]
        slc_pos = 0.5
        # Some simple slice tests with single grids
        for ax, an in enumerate("xyz"):
            xax = ds.coordinates.x_axis[ax]
            yax = ds.coordinates.y_axis[ax]
            for wf in ["density", None]:
                slc = ds.slice(ax, slc_pos)
                shifted_slc = ds.slice(ax, slc_pos + grid_eps)
                yield assert_equal, slc["ones"].sum(), slc["ones"].size
                yield assert_equal, slc["ones"].min(), 1.0
                yield assert_equal, slc["ones"].max(), 1.0
                yield assert_equal, np.unique(slc["px"]), uc[xax]
                yield assert_equal, np.unique(slc["py"]), uc[yax]
                yield assert_equal, np.unique(slc["pdx"]), 0.5 / dims[xax]
                yield assert_equal, np.unique(slc["pdy"]), 0.5 / dims[yax]
                pw = slc.to_pw(fields='density')
                for p in pw.plots.values():
                    tmpfd, tmpname = tempfile.mkstemp(suffix='.png')
                    os.close(tmpfd)
                    p.save(name=tmpname)
                    fns.append(tmpname)
                frb = slc.to_frb((1.0, 'unitary'), 64)
                shifted_frb = shifted_slc.to_frb((1.0, 'unitary'), 64)
                for slc_field in ['ones', 'density']:
                    fi = ds._get_field_info(slc_field)
                    yield assert_equal, frb[slc_field].info['data_source'], \
                        slc.__str__()
                    yield assert_equal, frb[slc_field].info['axis'], \
                        ax
                    yield assert_equal, frb[slc_field].info['field'], \
                        slc_field
                    yield assert_equal, frb[slc_field].units, \
                        Unit(fi.units)
                    yield assert_equal, frb[slc_field].info['xlim'], \
                        frb.bounds[:2]
                    yield assert_equal, frb[slc_field].info['ylim'], \
                        frb.bounds[2:]
                    yield assert_equal, frb[slc_field].info['center'], \
                        slc.center
                    yield assert_equal, frb[slc_field].info['coord'], \
                        slc_pos
                    yield assert_equal, frb[slc_field], \
                        shifted_frb[slc_field]
            # wf == None
            yield assert_equal, wf, None
    teardown_func(fns)
Example No. 9
def test_cutting_plane():
    fns = []
    for nprocs in [8, 1]:
        # We want to test both 1 proc and 8 procs, to make sure that
        # parallelism isn't broken
        ds = fake_random_ds(64, nprocs=nprocs)
        center = [0.5, 0.5, 0.5]
        normal = [1, 1, 1]
        cut = ds.cutting(normal, center)
        assert_equal(cut["ones"].sum(), cut["ones"].size)
        assert_equal(cut["ones"].min(), 1.0)
        assert_equal(cut["ones"].max(), 1.0)
        pw = cut.to_pw(fields="density")
        for p in pw.plots.values():
            tmpfd, tmpname = tempfile.mkstemp(suffix=".png")
            os.close(tmpfd)
            p.save(name=tmpname)
            fns.append(tmpname)
        for width in [(1.0, "unitary"), 1.0, ds.quan(0.5, "code_length")]:
            frb = cut.to_frb(width, 64)
            for cut_field in ["ones", "density"]:
                fi = ds._get_field_info("unknown", cut_field)
                data = frb[cut_field]
                assert_equal(data.info["data_source"], cut.__str__())
                assert_equal(data.info["axis"], 4)
                assert_equal(data.info["field"], cut_field)
                assert_equal(data.units, Unit(fi.units))
                assert_equal(data.info["xlim"], frb.bounds[:2])
                assert_equal(data.info["ylim"], frb.bounds[2:])
                assert_equal(data.info["length_to_cm"],
                             ds.length_unit.in_cgs())
                assert_equal(data.info["center"], cut.center)
    teardown_func(fns)
Example No. 10
def test_cutting_plane():
    fns = []
    for nprocs in [8, 1]:
        # We want to test both 1 proc and 8 procs, to make sure that
        # parallelism isn't broken
        ds = fake_random_ds(64, nprocs=nprocs)
        center = [0.5, 0.5, 0.5]
        normal = [1, 1, 1]
        cut = ds.cutting(normal, center)
        assert_equal(cut["ones"].sum(), cut["ones"].size)
        assert_equal(cut["ones"].min(), 1.0)
        assert_equal(cut["ones"].max(), 1.0)
        pw = cut.to_pw(fields='density')
        for p in pw.plots.values():
            tmpfd, tmpname = tempfile.mkstemp(suffix='.png')
            os.close(tmpfd)
            p.save(name=tmpname)
            fns.append(tmpname)
        for width in [(1.0, 'unitary'), 1.0, ds.quan(0.5, 'code_length')]:
            frb = cut.to_frb(width, 64)
            for cut_field in ['ones', 'density']:
                fi = ds._get_field_info("unknown", cut_field)
                assert_equal(frb[cut_field].info['data_source'], cut.__str__())
                assert_equal(frb[cut_field].info['axis'], 4)
                assert_equal(frb[cut_field].info['field'], cut_field)
                assert_equal(frb[cut_field].units, Unit(fi.units))
                assert_equal(frb[cut_field].info['xlim'], frb.bounds[:2])
                assert_equal(frb[cut_field].info['ylim'], frb.bounds[2:])
                assert_equal(frb[cut_field].info['length_to_cm'],
                             ds.length_unit.in_cgs())
                assert_equal(frb[cut_field].info['center'], cut.center)
    teardown_func(fns)
Example No. 11
    def __getitem__(self, key):
        if isinstance(key, string_types):
            key = getattr(dimensions, key)
        um = self.units_map
        if key not in um or um[key].dimensions is not key:
            units = _get_system_unit_string(key, self.units_map)
            self.units_map[key] = Unit(units, registry=self.registry)
        return self.units_map[key]
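
A hedged sketch of how this lazy, dimension-keyed lookup is typically used: a registered unit system can be indexed by a dimension name (or a dimensions expression), and the composed unit is cached in units_map on first access. The printed units are illustrative.

from yt.units.unit_systems import unit_system_registry

cgs = unit_system_registry["cgs"]
print(cgs["length"])   # cm
print(cgs["energy"])   # e.g. erg, composed and cached on first access
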
Example No. 12
    def setup_particle_fields(self, ptype, ftype="gas", num_neighbors=64):
        skip_output_units = ()
        for f, (units, aliases, dn) in sorted(self.known_particle_fields):
            units = self.ds.field_units.get((ptype, f), units)
            if (f in aliases or ptype not in self.ds.particle_types_raw
                ) and units not in skip_output_units:
                u = Unit(units, registry=self.ds.unit_registry)
                output_units = str(u.get_cgs_equivalent())
            else:
                output_units = units
            if (ptype, f) not in self.field_list:
                continue
            self.add_output_field(
                (ptype, f),
                sampling_type="particle",
                units=units,
                display_name=dn,
                output_units=output_units,
                take_log=False,
            )
            for alias in aliases:
                self.alias((ptype, alias), (ptype, f), units=output_units)

        ppos_fields = [f"particle_position_{ax}" for ax in "xyz"]
        pvel_fields = [f"particle_velocity_{ax}" for ax in "xyz"]
        particle_vector_functions(ptype, ppos_fields, pvel_fields, self)

        particle_deposition_functions(ptype, "particle_position",
                                      "particle_mass", self)
        standard_particle_fields(self, ptype)
        # Now we check for any leftover particle fields
        for field in sorted(self.field_list):
            if field in self:
                continue
            if not isinstance(field, tuple):
                raise RuntimeError
            if field[0] not in self.ds.particle_types:
                continue
            self.add_output_field(
                field,
                sampling_type="particle",
                units=self.ds.field_units.get(field, ""),
            )
        self.setup_smoothed_fields(ptype,
                                   num_neighbors=num_neighbors,
                                   ftype=ftype)
Example No. 13
    def _set_code_unit_attributes(self):
        """
        Generates the conversion to various physical units
        based on the parameter file
        """

        # This should be improved.
        h5f = h5py.File(self.parameter_filename, mode="r")
        for field_name in h5f["/field_types"]:
            current_field = h5f[f"/field_types/{field_name}"]
            if "field_to_cgs" in current_field.attrs:
                field_conv = current_field.attrs["field_to_cgs"]
                self.field_units[field_name] = just_one(field_conv)
            elif "field_units" in current_field.attrs:
                field_units = current_field.attrs["field_units"]
                if isinstance(field_units, str):
                    current_field_units = current_field.attrs["field_units"]
                else:
                    current_field_units = just_one(
                        current_field.attrs["field_units"])
                self.field_units[field_name] = current_field_units.decode(
                    "utf8")
            else:
                self.field_units[field_name] = ""

        if "dataset_units" in h5f:
            for unit_name in h5f["/dataset_units"]:
                current_unit = h5f[f"/dataset_units/{unit_name}"]
                value = current_unit[()]
                unit = current_unit.attrs["unit"]
                # need to convert to a Unit object and check dimensions
                # because unit can be things like
                # 'dimensionless/dimensionless**3' so naive string
                # comparisons are insufficient
                unit = Unit(unit, registry=self.unit_registry)
                if unit_name.endswith(
                        "_unit") and unit.dimensions is sympy_one:
                    # Catch code units and if they are dimensionless,
                    # assign CGS units. setdefaultattr will catch code units
                    # which have already been set via units_override.
                    un = unit_name[:-5]
                    un = un.replace("magnetic", "magnetic_field_cgs", 1)
                    unit = unit_system_registry["cgs"][un]
                    setdefaultattr(self, unit_name, self.quan(value, unit))
                setdefaultattr(self, unit_name, self.quan(value, unit))
                if unit_name in h5f["/field_types"]:
                    if unit_name in self.field_units:
                        mylog.warning(
                            "'field_units' was overridden by 'dataset_units/%s'",
                            unit_name,
                        )
                    self.field_units[unit_name] = str(unit)
        else:
            setdefaultattr(self, "length_unit", self.quan(1.0, "cm"))
            setdefaultattr(self, "mass_unit", self.quan(1.0, "g"))
            setdefaultattr(self, "time_unit", self.quan(1.0, "s"))

        h5f.close()
Example No. 14
def test_dimensionless():
    """
    Create dimensionless unit and check attributes.

    """
    u1 = Unit()

    yield assert_true, u1.is_dimensionless
    yield assert_true, u1.expr == 1
    yield assert_true, u1.cgs_value == 1
    yield assert_true, u1.dimensions == 1

    u2 = Unit("")

    yield assert_true, u2.is_dimensionless
    yield assert_true, u2.expr == 1
    yield assert_true, u2.cgs_value == 1
    yield assert_true, u2.dimensions == 1
Example No. 15
    def alias(
        self,
        alias_name,
        original_name,
        units=None,
        deprecate: Optional[Tuple[str, str]] = None,
    ):
        """
        Alias one field to another field.

        Parameters
        ----------
        alias_name : Tuple[str]
            The new field name.
        original_name : Tuple[str]
            The field to be aliased.
        units : str
           A plain text string encoding the unit.  Powers must be in
           python syntax (** instead of ^). If set to "auto" the units
           will be inferred from the return value of the field function.
        deprecate : Tuple[str], optional
            If this is set, then the tuple contains two string version
            numbers: the first marking the version when the field was
            deprecated, and the second marking when the field will be
            removed.
        """
        if original_name not in self:
            return
        if units is None:
            # We default to CGS here, but in principle, this can be pluggable
            # as well.
            u = Unit(self[original_name].units, registry=self.ds.unit_registry)
            if u.dimensions is not dimensionless:
                units = str(self.ds.unit_system[u.dimensions])
            else:
                units = self[original_name].units
        self.field_aliases[alias_name] = original_name
        function = TranslationFunc(original_name)
        if deprecate is not None:
            self.add_deprecated_field(
                alias_name,
                function=function,
                sampling_type=self[original_name].sampling_type,
                display_name=self[original_name].display_name,
                units=units,
                since=deprecate[0],
                removal=deprecate[1],
                ret_name=original_name,
            )
        else:
            self.add_field(
                alias_name,
                function=function,
                sampling_type=self[original_name].sampling_type,
                display_name=self[original_name].display_name,
                units=units,
            )
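
A hypothetical sketch of how a frontend's setup code might call the alias() defined above; the field names and version numbers are illustrative, and the deprecate tuple follows the (since, removal) form documented in the docstring.

    def setup_fluid_fields(self):
        # plain alias; units are inferred from the original field's unit system
        self.alias(("gas", "density"), ("stream", "density"))
        # deprecated alias: warn starting at 4.0.0, slated for removal in 4.1.0
        self.alias(("gas", "rho"), ("gas", "density"),
                   deprecate=("4.0.0", "4.1.0"))
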
Example No. 16
    def __getitem__(self, item):
        if item in self.data: return self.data[item]
        mylog.info("Making a fixed resolution buffer of (%s) %d by %d" %
                   (item, self.buff_size[0], self.buff_size[1]))
        dd = self.data_source
        width = self.ds.arr((self.bounds[1] - self.bounds[0],
                             self.bounds[3] - self.bounds[2],
                             self.bounds[5] - self.bounds[4]))
        buff = off_axis_projection(dd.ds, dd.center, dd.normal_vector,
                                   width, dd.resolution, item,
                                   weight=dd.weight_field, volume=dd.volume,
                                   no_ghost=dd.no_ghost,
                                   interpolated=dd.interpolated,
                                   north_vector=dd.north_vector)
        units = Unit(dd.ds.field_info[item].units, registry=dd.ds.unit_registry)
        if dd.weight_field is None:
            units *= Unit('cm', registry=dd.ds.unit_registry)
        ia = ImageArray(buff.swapaxes(0, 1), input_units=units,
                        info=self._get_info(item))
        self[item] = ia
        return ia
Example No. 17
def test_create_new_symbol():
    """
    Create unit with unknown symbol.

    """
    u1 = Unit("abc", base_value=42, dimensions=(mass / time))

    assert_true(u1.expr == Symbol("abc", positive=True))
    assert_true(u1.base_value == 42)
    assert_true(u1.dimensions == mass / time)

    u1 = Unit("abc", base_value=42, dimensions=length**3)

    assert_true(u1.expr == Symbol("abc", positive=True))
    assert_true(u1.base_value == 42)
    assert_true(u1.dimensions == length**3)

    u1 = Unit("abc", base_value=42, dimensions=length * (mass * length))

    assert_true(u1.expr == Symbol("abc", positive=True))
    assert_true(u1.base_value == 42)
    assert_true(u1.dimensions == length**2 * mass)

    assert_raises(UnitParseError,
                  Unit,
                  'abc',
                  base_value=42,
                  dimensions=length**length)
    assert_raises(UnitParseError,
                  Unit,
                  'abc',
                  base_value=42,
                  dimensions=length**(length * length))
    assert_raises(UnitParseError,
                  Unit,
                  'abc',
                  base_value=42,
                  dimensions=length - mass)
    assert_raises(UnitParseError,
                  Unit,
                  'abc',
                  base_value=42,
                  dimensions=length + mass)
Example No. 18
    def __setstate__(self, state):
        """Pickle setstate method

        This is called by pickle.load() and restores the unit data from the
        metadata extracted in __reduce__ and then serialized by pickle.
        """
        super(YTArray, self).__setstate__(state[1:])
        unit, lut = state[0]
        registry = UnitRegistry(lut=lut, add_default_symbols=False)
        self.units = Unit(unit, registry=registry)
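
A minimal round-trip sketch of the behavior __setstate__ restores: the unit lookup table is pickled alongside the data, so units survive serialization. Assumes yt's public YTArray.

import pickle
from yt.units.yt_array import YTArray

a = YTArray([1.0, 2.0, 3.0], "Msun/Mpc**3")
b = pickle.loads(pickle.dumps(a))
assert b.units == a.units   # registry rebuilt from the pickled lookup table
assert (b == a).all()
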
Example No. 19
def test_slice(pf):
    fns = []
    grid_eps = np.finfo(np.float64).eps
    for nprocs in [8, 1]:
        # We want to test both 1 proc and 8 procs, to make sure that
        # parallelism isn't broken
        ds = fake_random_ds(64, nprocs=nprocs)
        dims = ds.domain_dimensions
        xn, yn, zn = ds.domain_dimensions
        dx = ds.arr(1.0 / (ds.domain_dimensions * 2), "code_length")
        xi, yi, zi = ds.domain_left_edge + dx
        xf, yf, zf = ds.domain_right_edge - dx
        coords = np.mgrid[xi:xf:xn * 1j, yi:yf:yn * 1j, zi:zf:zn * 1j]
        uc = [np.unique(c) for c in coords]
        slc_pos = 0.5
        # Some simple slice tests with single grids
        for ax, an in enumerate("xyz"):
            xax = ds.coordinates.x_axis[ax]
            yax = ds.coordinates.y_axis[ax]
            for wf in ["density", None]:
                slc = ds.slice(ax, slc_pos)
                shifted_slc = ds.slice(ax, slc_pos + grid_eps)
                assert_equal(slc["ones"].sum(), slc["ones"].size)
                assert_equal(slc["ones"].min(), 1.0)
                assert_equal(slc["ones"].max(), 1.0)
                assert_equal(np.unique(slc["px"]), uc[xax])
                assert_equal(np.unique(slc["py"]), uc[yax])
                assert_equal(np.unique(slc["pdx"]), 0.5 / dims[xax])
                assert_equal(np.unique(slc["pdy"]), 0.5 / dims[yax])
                pw = slc.to_pw(fields="density")
                for p in pw.plots.values():
                    tmpfd, tmpname = tempfile.mkstemp(suffix=".png")
                    os.close(tmpfd)
                    p.save(name=tmpname)
                    fns.append(tmpname)
                for width in [(1.0, "unitary"), 1.0,
                              ds.quan(0.5, "code_length")]:
                    frb = slc.to_frb((1.0, "unitary"), 64)
                    shifted_frb = shifted_slc.to_frb((1.0, "unitary"), 64)
                    for slc_field in ["ones", "density"]:
                        fi = ds._get_field_info(slc_field)
                        assert_equal(frb[slc_field].info["data_source"],
                                     slc.__str__())
                        assert_equal(frb[slc_field].info["axis"], ax)
                        assert_equal(frb[slc_field].info["field"], slc_field)
                        assert_equal(frb[slc_field].units, Unit(fi.units))
                        assert_equal(frb[slc_field].info["xlim"],
                                     frb.bounds[:2])
                        assert_equal(frb[slc_field].info["ylim"],
                                     frb.bounds[2:])
                        assert_equal(frb[slc_field].info["center"], slc.center)
                        assert_equal(frb[slc_field].info["coord"], slc_pos)
                        assert_equal(frb[slc_field], shifted_frb[slc_field])
            assert_equal(wf, None)
    teardown_func(fns)
Example No. 20
    def setup_particle_fields(self, ptype, ftype='gas', num_neighbors=64):
        skip_output_units = ("code_length", )
        for f, (units, aliases, dn) in sorted(self.known_particle_fields):
            units = self.ds.field_units.get((ptype, f), units)
            if (f in aliases or ptype not in self.ds.particle_types_raw) and \
                units not in skip_output_units:
                u = Unit(units, registry=self.ds.unit_registry)
                output_units = str(self.ds.unit_system[u.dimensions])
            else:
                output_units = units
            if (ptype, f) not in self.field_list:
                continue
            self.add_output_field((ptype, f),
                                  sampling_type="particle",
                                  units=units,
                                  display_name=dn,
                                  output_units=output_units)
            for alias in aliases:
                self.alias((ptype, alias), (ptype, f), units=output_units)

        # We'll either have particle_position or particle_position_[xyz]
        if (ptype, "particle_position") in self.field_list or \
           (ptype, "particle_position") in self.field_aliases:
            particle_scalar_functions(ptype, "particle_position",
                                      "particle_velocity", self)
        else:
            # We need to check to make sure that there's a "known field" that
            # overlaps with one of the vector fields.  For instance, if we are
            # in the Stream frontend, and we have a set of scalar position
            # fields, they will overlap with -- and be overridden by -- the
            # "known" vector field that the frontend creates.  So the easiest
            # thing to do is to simply remove the on-disk field (which doesn't
            # exist) and replace it with a derived field.
            if (ptype, "particle_position") in self and \
                 self[ptype, "particle_position"]._function == NullFunc:
                self.pop((ptype, "particle_position"))
            particle_vector_functions(
                ptype, ["particle_position_%s" % ax for ax in 'xyz'],
                ["particle_velocity_%s" % ax for ax in 'xyz'], self)
        particle_deposition_functions(ptype, "particle_position",
                                      "particle_mass", self)
        standard_particle_fields(self, ptype)
        # Now we check for any leftover particle fields
        for field in sorted(self.field_list):
            if field in self: continue
            if not isinstance(field, tuple):
                raise RuntimeError
            if field[0] not in self.ds.particle_types:
                continue
            self.add_output_field(field,
                                  sampling_type="particle",
                                  units=self.ds.field_units.get(field, ""))
        self.setup_smoothed_fields(ptype,
                                   num_neighbors=num_neighbors,
                                   ftype=ftype)
Example No. 21
def test_create_fail_on_base_value_type():
    """
    Fail to create unit with bad base_value type.

    """
    try:
        Unit("a", base_value="a", dimensions=(mass / time))
    except UnitParseError:
        assert_true(True)
    else:
        assert_true(False)
Example No. 22
def test_create_fail_on_bad_dimensions_type():
    """
    Fail to create unit with bad dimensions type.

    """
    try:
        Unit("a", base_value=1, dimensions="(mass)")
    except UnitParseError:
        assert_true(True)
    else:
        assert_true(False)
Example No. 23
def test_create_fail_on_bad_symbol_type():
    """
    Fail to create unit with bad symbol type.

    """
    try:
        Unit([1])  # something other than Expr and str
    except UnitParseError:
        assert_true(True)
    else:
        assert_true(False)
Example No. 24
def test_create_fail_on_unknown_symbol():
    """
    Fail to create unit with unknown symbol, without base_value and dimensions.

    """
    try:
        Unit(Symbol("jigawatts"))
    except UnitParseError:
        assert_true(True)
    else:
        assert_true(False)
Example No. 25
def setup_magnetic_field_aliases(registry, ds_ftype, ds_fields, ftype="gas"):
    r"""
    This routine sets up special aliases between dataset-specific magnetic fields
    and the default magnetic fields in yt so that unit conversions between different
    unit systems can be handled properly. This is only called from the `setup_fluid_fields`
    method of a frontend's :class:`FieldInfoContainer` instance.

    Parameters
    ----------
    registry : :class:`FieldInfoContainer`
        The field registry that these definitions will be installed into.
    ds_ftype : string
        The field type for the fields we're going to alias, e.g. "flash", "enzo", "athena", etc.
    ds_fields : list of strings
        The fields that will be aliased.
    ftype : string, optional
        The resulting field type of the fields. Default "gas".

    Examples
    --------
    >>> class PlutoFieldInfo(ChomboFieldInfo):
    ...     def setup_fluid_fields(self):
    ...         from yt.fields.magnetic_field import \
    ...             setup_magnetic_field_aliases
    ...         setup_magnetic_field_aliases(self, "chombo", ["bx%s" % ax for ax in [1,2,3]])
    """
    unit_system = registry.ds.unit_system
    ds_fields = [(ds_ftype, fd) for fd in ds_fields]
    if ds_fields[0] not in registry:
        return
    from_units = Unit(registry[ds_fields[0]].units,
                      registry=registry.ds.unit_registry)
    if dimensions.current_mks in unit_system.base_units:
        to_units = unit_system["magnetic_field_mks"]
        equiv = "SI"
    else:
        to_units = unit_system["magnetic_field_cgs"]
        equiv = "CGS"
    if from_units.dimensions == to_units.dimensions:
        convert = lambda x: x.in_units(to_units)
    else:
        convert = lambda x: x.to_equivalent(to_units, equiv)

    def mag_field(fd):
        def _mag_field(field, data):
            return convert(data[fd])

        return _mag_field

    for ax, fd in zip(registry.ds.coordinates.axis_order, ds_fields):
        registry.add_field((ftype, "magnetic_field_%s" % ax),
                           sampling_type="cell",
                           function=mag_field(fd),
                           units=unit_system[to_units.dimensions])
Example No. 26
def test_create_fail_on_bad_dimensions_type():
    """
    Fail to create unit with bad dimensions type.

    """
    try:
        u1 = Unit("a", cgs_value=1, dimensions="(mass)")
    except UnitParseError:
        yield assert_true, True
    else:
        yield assert_true, False
Example No. 27
def test_create_fail_on_cgs_value_type():
    """
    Fail to create unit with bad cgs_value type.

    """
    try:
        u1 = Unit("a", cgs_value="a", dimensions=(mass/time))
    except UnitParseError:
        yield assert_true, True
    else:
        yield assert_true, False
Example No. 28
    def _set_code_unit_attributes(self):
        """
        Generates the conversion to various physical units
        based on the parameter file
        """

        # This should be improved.
        h5f = h5py.File(self.parameter_filename, "r")
        for field_name in h5f["/field_types"]:
            current_field = h5f["/field_types/%s" % field_name]
            if 'field_to_cgs' in current_field.attrs:
                field_conv = current_field.attrs['field_to_cgs']
                self.field_units[field_name] = just_one(field_conv)
            elif 'field_units' in current_field.attrs:
                field_units = current_field.attrs['field_units']
                if isinstance(field_units, string_types):
                    current_field_units = current_field.attrs['field_units']
                else:
                    current_field_units = \
                        just_one(current_field.attrs['field_units'])
                self.field_units[field_name] = current_field_units.decode(
                    "utf8")
            else:
                self.field_units[field_name] = ""

        if "dataset_units" in h5f:
            for unit_name in h5f["/dataset_units"]:
                current_unit = h5f["/dataset_units/%s" % unit_name]
                value = current_unit.value
                unit = current_unit.attrs["unit"]
                # need to convert to a Unit object and check dimensions
                # because unit can be things like
                # 'dimensionless/dimensionless**3' so naive string
                # comparisons are insufficient
                unit = Unit(unit, registry=self.unit_registry)
                if unit_name.endswith(
                        '_unit') and unit.dimensions is sympy_one:
                    un = unit_name[:-5]
                    un = un.replace('magnetic', 'magnetic_field', 1)
                    unit = self.unit_system[un]
                    setdefaultattr(self, unit_name, self.quan(value, unit))
                setdefaultattr(self, unit_name, self.quan(value, unit))
                if unit_name in h5f["/field_types"]:
                    if unit_name in self.field_units:
                        mylog.warning(
                            "'field_units' was overridden by 'dataset_units/%s'"
                            % (unit_name))
                    self.field_units[unit_name] = str(unit)
        else:
            setdefaultattr(self, 'length_unit', self.quan(1.0, "cm"))
            setdefaultattr(self, 'mass_unit', self.quan(1.0, "g"))
            setdefaultattr(self, 'time_unit', self.quan(1.0, "s"))

        h5f.close()
Example No. 29
    def _sanitize_dimensions(self, item):
        field = self.data_source._determine_fields(item)[0]
        finfo = self.data_source.ds.field_info[field]
        dimensions = Unit(
            finfo.units, registry=self.data_source.ds.unit_registry).dimensions
        if dimensions not in self.known_dimensions:
            self.known_dimensions[dimensions] = item
            ret_item = item
        else:
            ret_item = self.known_dimensions[dimensions]
        return ret_item
Example No. 30
def test_create_fail_on_unknown_symbol():
    """
    Fail to create unit with unknown symbol, without cgs_value and dimensions.

    """
    try:
        u1 = Unit(Symbol("jigawatts"))
    except UnitParseError:
        yield assert_true, True
    else:
        yield assert_true, False
Example No. 31
def test_dimensionless():
    """
    Create dimensionless unit and check attributes.

    """
    u1 = Unit()

    assert_true(u1.is_dimensionless)
    assert_true(u1.expr == 1)
    assert_true(u1.base_value == 1)
    assert_true(u1.dimensions == 1)

    u2 = Unit("")

    assert_true(u2.is_dimensionless)
    assert_true(u2.expr == 1)
    assert_true(u2.base_value == 1)
    assert_true(u2.dimensions == 1)

    assert_equal(u1.latex_repr, '')
    assert_equal(u2.latex_repr, '')
Example No. 32
def setup_gradient_fields(registry, grad_field, field_units, slice_info=None):

    geom = registry.ds.geometry
    if is_curvilinear(geom):
        mylog.warning(
            "In %s geometry, gradient fields may contain artifacts near cartesian axes."
            % geom)

    assert (isinstance(grad_field, tuple))
    ftype, fname = grad_field
    if slice_info is None:
        sl_left = slice(None, -2, None)
        sl_right = slice(2, None, None)
        div_fac = 2.0
    else:
        sl_left, sl_right, div_fac = slice_info
    slice_3d = (slice(1, -1), slice(1, -1), slice(1, -1))

    def grad_func(axi, ax):
        slice_3dl = slice_3d[:axi] + (sl_left, ) + slice_3d[axi + 1:]
        slice_3dr = slice_3d[:axi] + (sl_right, ) + slice_3d[axi + 1:]

        def func(field, data):
            ds = div_fac * data[ftype, "d%s" % ax]
            if ax == "theta":
                ds *= data[ftype, "r"]
            if ax == "phi":
                ds *= data[ftype, "r"] * np.sin(data[ftype, "theta"])
            f = data[grad_field][slice_3dr] / ds[slice_3d]
            f -= data[grad_field][slice_3dl] / ds[slice_3d]
            new_field = np.zeros_like(data[grad_field], dtype=np.float64)
            new_field = data.ds.arr(new_field, f.units)
            new_field[slice_3d] = f
            return new_field

        return func

    field_units = Unit(field_units, registry=registry.ds.unit_registry)
    grad_units = field_units / registry.ds.unit_system["length"]

    for axi, ax in enumerate(registry.ds.coordinates.axis_order):
        f = grad_func(axi, ax)
        registry.add_field((ftype, "%s_gradient_%s" % (fname, ax)),
                           sampling_type="local",
                           function=f,
                           validators=[ValidateSpatial(1, [grad_field])],
                           units=grad_units)

    create_magnitude_field(registry,
                           "%s_gradient" % fname,
                           grad_units,
                           ftype=ftype,
                           validators=[ValidateSpatial(1, [grad_field])])
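
A hedged sketch of the public entry point that typically drives this routine: Dataset.add_gradient_fields asks for gradient fields of an existing field, which come out with units of field_units / length, as set up above. The sample dataset and printed unit are illustrative.

from yt.testing import fake_random_ds

ds = fake_random_ds(16)
ds.add_gradient_fields(("gas", "density"))   # adds density_gradient_{x,y,z,magnitude}
ad = ds.all_data()
print(ad["gas", "density_gradient_magnitude"].units)   # e.g. g/cm**4
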
Example No. 33
    def set_field_unit(self, field, new_unit):
        """Sets a new unit for the requested field

        Parameters
        ----------
        field : string or field tuple
           The name of the field that is to be changed.

        new_unit : string or Unit object
           The name of the new unit.
        """
        if field in self.field_units:
            self.field_units[field] = \
                Unit(new_unit, registry=self.ds.unit_registry)
        else:
            fd = self.field_map[field]
            if fd in self.field_units:
                self.field_units[fd] = \
                    Unit(new_unit, registry=self.ds.unit_registry)
            else:
                raise KeyError("%s not in profile!" % (field))
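
A hedged usage sketch of set_field_unit on a profile object, mirroring yt's documented workflow; the sample dataset and fields are illustrative.

import yt
from yt.testing import fake_random_ds

ds = fake_random_ds(16)
prof = yt.create_profile(ds.all_data(), "density", ["velocity_x"])
prof.set_field_unit("velocity_x", "km/s")   # profiled velocities now returned in km/s
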
    def get_label(self, projected=False):
        """
        Return a data label for the given field, including units.
        """
        name = self.name[1]
        if self.display_name is not None:
            name = self.display_name

        # Start with the field name
        data_label = r"$\rm{%s}" % name

        # Grab the correct units
        if projected:
            raise NotImplementedError
        else:
            units = Unit(self.units)
        # Add unit label
        if not units.is_dimensionless:
            data_label += r"\ \ (%s)" % (units.latex_representation())

        data_label += r"$"
        return data_label
    def get_units(self):
        u = Unit(self.units)
        return u.latex_representation()
Example No. 36
    def __array_wrap__(self, out_arr, context=None):
        ret = super(YTArray, self).__array_wrap__(out_arr, context)
        if isinstance(ret, YTQuantity) and ret.shape != ():
            ret = ret.view(YTArray)
        if context is None:
            if ret.shape == ():
                return ret[()]
            else:
                return ret
        elif context[0] in unary_operators:
            u = getattr(context[1][0], "units", None)
            if u is None:
                u = NULL_UNIT
            unit = self._ufunc_registry[context[0]](u)
            ret_class = type(self)
        elif context[0] in binary_operators:
            oper1 = coerce_iterable_units(context[1][0])
            oper2 = coerce_iterable_units(context[1][1])
            cls1 = type(oper1)
            cls2 = type(oper2)
            unit1 = getattr(oper1, "units", None)
            unit2 = getattr(oper2, "units", None)
            ret_class = get_binary_op_return_class(cls1, cls2)
            if unit1 is None:
                unit1 = Unit(registry=getattr(unit2, "registry", None))
            if unit2 is None and context[0] is not power:
                unit2 = Unit(registry=getattr(unit1, "registry", None))
            elif context[0] is power:
                unit2 = oper2
                if isinstance(unit2, np.ndarray):
                    if isinstance(unit2, YTArray):
                        if unit2.units.is_dimensionless:
                            pass
                        else:
                            raise YTUnitOperationError(context[0], unit1, unit2)
                    unit2 = 1.0
            unit_operator = self._ufunc_registry[context[0]]
            if unit_operator in (preserve_units, comparison_unit, arctan2_unit):
                if unit1 != unit2:
                    if not unit1.same_dimensions_as(unit2):
                        raise YTUnitOperationError(context[0], unit1, unit2)
                    else:
                        raise YTUfuncUnitError(context[0], unit1, unit2)
            unit = self._ufunc_registry[context[0]](unit1, unit2)
            if unit_operator in (multiply_units, divide_units):
                if unit.is_dimensionless and unit.base_value != 1.0:
                    if not unit1.is_dimensionless:
                        if unit1.dimensions == unit2.dimensions:
                            np.multiply(out_arr.view(np.ndarray),
                                        unit.base_value, out=out_arr)
                            unit = Unit(registry=unit.registry)
        else:
            raise RuntimeError("Support for the %s ufunc has not been added "
                               "to YTArray." % str(context[0]))
        if unit is None:
            out_arr = np.array(out_arr, copy=False)
            return out_arr
        out_arr.units = unit
        if out_arr.size == 1:
            return YTQuantity(np.array(out_arr), unit)
        else:
            if ret_class is YTQuantity:
                # This happens if you do ndarray * YTQuantity. Explicitly
                # casting to YTArray avoids creating a YTQuantity with size > 1
                return YTArray(np.array(out_arr), unit)
            return ret_class(np.array(out_arr, copy=False), unit)
    def get_projected_units(self):
        u = Unit(self.units) * Unit('cm')
        return u.latex_representation()
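
A small sketch of the LaTeX helper these two methods wrap; the exact strings returned by latex_representation() are illustrative.

from yt.units.unit_object import Unit

u = Unit("g/cm**3")
print(u.latex_representation())                  # e.g. \frac{\rm{g}}{\rm{cm}^{3}}
print((u * Unit("cm")).latex_representation())   # projected version, e.g. \frac{\rm{g}}{\rm{cm}^{2}}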