def test_quantity_1arg(self):
    """A string-constructed Quantity keeps its units and converts correctly
    before and after a pickle round-trip."""
    original = Quantity("123 foo")
    restored = pickle.loads(pickle.dumps(original))
    for q in (original, restored):
        self.assertEqual(str(q.units), "foo")
        self.assertEqual(q.to("bar").magnitude, 246)
def _get_conversion_factor(origin_unit: Enum, si_unit: Enum) -> Tuple[Union[operator.mul, operator.add], float]:
    """
    Method to get the conversion factor (float) for a specific parameter.

    :param origin_unit: origin unit enumeration of parameter
    :param si_unit: si unit enumeration of parameter
    :return: conversion operator (``operator.mul`` or ``operator.add``) together
        with the factor/offset (float) to apply to values given in ``origin_unit``
    """
    # NOTE(review): the return annotation uses the function objects
    # operator.mul/operator.add where a type is expected; typing.Callable
    # would be the conventional spelling — confirm before changing imports.
    if si_unit == SIUnit.KILOGRAM_PER_SQUARE_METER.value:
        # Fixed conversion factors to kg / m², as it only applies
        # for water with density 1 g / cm³
        if origin_unit == OriginUnit.MILLIMETER.value:
            return operator.mul, 1
        else:
            raise ValueError("manually set conversion factor for precipitation unit")
    elif si_unit == SIUnit.DEGREE_KELVIN.value:
        # Apply offset addition to temperature measurements
        # Take 0 as this is appropriate for adding on other numbers
        # (just the difference)
        degree_offset = Quantity(0, origin_unit).to(si_unit).magnitude
        return operator.add, degree_offset
    elif si_unit == SIUnit.PERCENT.value:
        # Percent goes through the registry: convert the unit expression
        # itself to percent and use the resulting magnitude as the factor.
        factor = REGISTRY(str(origin_unit)).to(str(si_unit)).magnitude
        return operator.mul, factor
    else:
        # For multiplicative units we need to use 1 as quantity to apply the
        # appropriate factor
        factor = Quantity(1, origin_unit).to(si_unit).magnitude
        return operator.mul, factor
def test_quantity_1arg(self):
    """A string-constructed kilogram Quantity survives a pickle round-trip
    with units and conversion intact."""
    original = Quantity("123 kg")
    restored = pickle.loads(pickle.dumps(original))
    for q in (original, restored):
        self.assertEqual(str(q.units), "kilogram")
        self.assertEqual(q.to("t").magnitude, 0.123)
def test_quantity_2args(self, protocol):
    """A (magnitude, units) Quantity survives pickling at the given protocol."""
    original = Quantity(123, "kg")
    restored = pickle.loads(pickle.dumps(original, protocol))
    for q in (original, restored):
        assert str(q.units) == "kilogram"
        assert q.to("t").magnitude == 0.123
def test_pickle_definition_syntax_error(self):
    # OffsetUnitCalculusError raised from a custom ureg must be pickleable even if
    # the ureg is not registered as the application ureg
    ureg = UnitRegistry(filename=None)
    ureg.define("foo = [bar]")
    ureg.define("bar = 2 foo")
    q1 = ureg.Quantity("1 foo")
    q2 = ureg.Quantity("1 bar")
    # Exercise every pickle protocol against every exception type.
    for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
        for ex in [
            DefinitionSyntaxError("foo", filename="a.txt", lineno=123),
            RedefinitionError("foo", "bar"),
            UndefinedUnitError("meter"),
            DimensionalityError("a", "b", "c", "d", extra_msg=": msg"),
            OffsetUnitCalculusError(
                Quantity("1 kg")._units, Quantity("1 s")._units),
            OffsetUnitCalculusError(q1._units, q2._units),
        ]:
            with self.subTest(protocol=protocol, etype=type(ex)):
                # A quantity defined only in the custom registry cannot be
                # unpickled (the application registry does not know "foo")...
                pik = pickle.dumps(ureg.Quantity("1 foo"), protocol)
                with self.assertRaises(UndefinedUnitError):
                    pickle.loads(pik)
                # ...but the exception object itself must round-trip intact:
                # same type, args, attributes, and rendered message.
                ex2 = pickle.loads(pickle.dumps(ex, protocol))
                assert type(ex) is type(ex2)
                self.assertEqual(ex.args, ex2.args)
                self.assertEqual(ex.__dict__, ex2.__dict__)
                self.assertEqual(str(ex), str(ex2))
def test_quantity_2args(self, protocol):
    """A (magnitude, units) Quantity survives pickling at the given protocol."""
    original = Quantity(123, "kg")
    restored = pickle.loads(pickle.dumps(original, protocol))
    for q in (original, restored):
        self.assertEqual(str(q.units), "kilogram")
        self.assertEqual(q.to("t").magnitude, 0.123)
def test_quantity_2args(self, protocol):
    """A custom-unit Quantity survives pickling at the given protocol."""
    original = Quantity(123, "foo")
    restored = pickle.loads(pickle.dumps(original, protocol))
    for q in (original, restored):
        assert str(q.units) == "foo"
        assert q.to("bar").magnitude == 246
def test_quantity_2args(self, protocol):
    """A custom-unit Quantity survives pickling at the given protocol."""
    original = Quantity(123, "foo")
    restored = pickle.loads(pickle.dumps(original, protocol))
    for q in (original, restored):
        self.assertEqual(str(q.units), "foo")
        self.assertEqual(q.to("bar").magnitude, 246)
def get_divergence_from_beam_diameter(E, beam_diameter_fwhm):
    """Calculate the divergence (radian) from photon energy (eV) and beam_diameter (m)"""
    photon_energy = Quantity(E, Unit("eV"))
    # Convert the FWHM diameter to the beam waist
    # (rms of the Gaussian amplitude distribution).
    waist = beam_diameter_fwhm / np.sqrt(2.0 * np.log(2.0))
    # Division order kept identical to preserve floating-point results.
    divergence = 2.0 * hbar * c / waist / photon_energy.to("joule").magnitude
    return float(divergence)
def test_measurement_2args(self):
    """A Measurement built from mixed units normalizes its error and
    survives a pickle round-trip."""
    original = Measurement(Quantity(123, "foo"), Quantity(10, "bar"))
    restored = pickle.loads(pickle.dumps(original))
    for m in (original, restored):
        self.assertEqual(m.value.magnitude, 123)
        self.assertEqual(m.error.magnitude, 5)
        self.assertEqual(str(m.units), "foo")
def test_measurement_2args(self, protocol):
    """A mixed-unit Measurement survives pickling at the given protocol."""
    original = Measurement(Quantity(123, "foo"), Quantity(10, "bar"))
    restored = pickle.loads(pickle.dumps(original, protocol))
    for m in (original, restored):
        assert m.value.magnitude == 123
        assert m.error.magnitude == 5
        assert str(m.units) == "foo"
def test_measurement_2args(self, protocol):
    """A kilogram Measurement survives pickling at the given protocol."""
    original = Measurement(Quantity(123, "kg"), Quantity(15, "kg"))
    restored = pickle.loads(pickle.dumps(original, protocol))
    for m in (original, restored):
        self.assertEqual(m.value.magnitude, 123)
        self.assertEqual(m.error.magnitude, 15)
        self.assertEqual(str(m.units), "kilogram")
def test_measurement_2args(self, protocol):
    """A kilogram Measurement survives pickling at the given protocol."""
    original = Measurement(Quantity(123, "kg"), Quantity(15, "kg"))
    restored = pickle.loads(pickle.dumps(original, protocol))
    for m in (original, restored):
        assert m.value.magnitude == 123
        assert m.error.magnitude == 15
        assert str(m.units) == "kilogram"
def test_offset_unit_calculus_error(self):
    """OffsetUnitCalculusError lists the offending unit(s) and links the docs."""
    single = OffsetUnitCalculusError(Quantity("1 kg")._units)
    expected_single = (
        "Ambiguous operation with offset unit (kilogram). See "
        + OFFSET_ERROR_DOCS_HTML
        + " for guidance."
    )
    assert str(single) == expected_single

    double = OffsetUnitCalculusError(
        Quantity("1 kg")._units, Quantity("1 s")._units
    )
    expected_double = (
        "Ambiguous operation with offset unit (kilogram, second). See "
        + OFFSET_ERROR_DOCS_HTML
        + " for guidance."
    )
    assert str(double) == expected_double
def test_offset_unit_calculus_error(self):
    """OffsetUnitCalculusError lists the offending unit(s) and links the docs."""
    single = OffsetUnitCalculusError(Quantity("1 kg")._units)
    self.assertEqual(
        str(single),
        "Ambiguous operation with offset unit (kilogram). See "
        "https://pint.readthedocs.io/en/latest/nonmult.html for guidance.",
    )

    double = OffsetUnitCalculusError(Quantity("1 kg")._units, Quantity("1 s")._units)
    self.assertEqual(
        str(double),
        "Ambiguous operation with offset unit (kilogram, second). See "
        "https://pint.readthedocs.io/en/latest/nonmult.html for guidance.",
    )
def test_logarithmic_unit_calculus_error(self):
    """LogarithmicUnitCalculusError lists the offending unit(s) and links the docs."""
    Quantity = UnitRegistry(autoconvert_offset_to_baseunit=True).Quantity

    one_unit = LogarithmicUnitCalculusError(Quantity("1 dB")._units)
    expected_one = (
        "Ambiguous operation with logarithmic unit (decibel). See "
        + LOG_ERROR_DOCS_HTML
        + " for guidance."
    )
    assert str(one_unit) == expected_one

    two_units = LogarithmicUnitCalculusError(
        Quantity("1 dB")._units, Quantity("1 octave")._units
    )
    expected_two = (
        "Ambiguous operation with logarithmic unit (decibel, octave). See "
        + LOG_ERROR_DOCS_HTML
        + " for guidance."
    )
    assert str(two_units) == expected_two
def test_measurement_2args(self, protocol):
    """Unpickling a Measurement binds it to the *current* application
    registry, not the one it was created with."""
    set_application_registry(self.ureg1)
    m1 = Measurement(Quantity(10, "foo"), Quantity(1, "foo"))
    set_application_registry(self.ureg2)
    m2 = Measurement(Quantity(10, "foo"), Quantity(1, "foo"))
    # m1 was created under ureg1 but is unpickled while ureg2 is active.
    m3 = pickle.loads(pickle.dumps(m1, protocol))

    assert m1.dimensionality == {"[dim1]": 1}
    assert m2.dimensionality == {"[dim2]": 1}
    assert m3.dimensionality == {"[dim2]": 1}

    for m, value, error in ((m1, 20, 2), (m2, 30, 3), (m3, 30, 3)):
        converted = m.to("bar")
        assert converted.value.magnitude == value
        assert converted.error.magnitude == error
def translated(self, xyz: pint.Quantity) -> LeafCloud:
    """
    Return a copy of self translated by the vector ``xyz``.

    Parameters
    ----------
    xyz : :class:`pint.Quantity`
        A 3-vector or a (N, 3)-array by which leaves will be translated. If
        (N, 3) variant is used, the array shape must match that of
        ``leaf_positions``.

    Returns
    -------
    :class:`LeafCloud`
        Translated copy of self.

    Raises
    ------
    ValueError
        Sizes of ``xyz`` and ``self.leaf_positions`` are incompatible.
    """
    if xyz.ndim <= 1:
        # Promote a bare 3-vector so it broadcasts over all leaf positions.
        offsets = xyz.reshape((1, 3))
    else:
        if xyz.shape != self.leaf_positions.shape:
            raise ValueError(
                f"shapes xyz {xyz.shape} and self.leaf_positions "
                f"{self.leaf_positions.shape} do not match"
            )
        offsets = xyz
    return attr.evolve(self, leaf_positions=self.leaf_positions + offsets)
def add_window_glazing_material(idf: IDF,
                                idf_obj_name: str,
                                mat_def: TransparentMaterial,
                                thickness: pint.Quantity,
                                ureg: pint.UnitRegistry) -> None:
    """
    Add a WindowMaterial:Glazing object for a transparent material, unless an
    object of that type and name already exists in the IDF.

    :param idf: IDF model to extend (modified in place)
    :param idf_obj_name: name for the new IDF object
    :param mat_def: transparent material definition providing optical/thermal properties
    :param thickness: glazing layer thickness
    :param ureg: pint unit registry used for the unit conversions
    :return: None
    """
    idf_obj_type = idf_strings.IDFObjects.win_material_glazing
    if not idf_writing_helpers.exists_in_idf(idf, idf_obj_type, idf_obj_name):
        win_glazing = idf.newidfobject(idf_obj_type)
        win_glazing.Name = idf_obj_name
        # Thickness is written in meters.
        win_glazing.Thickness = thickness.to(ureg.m).m
        win_glazing.Optical_Data_Type = idf_strings.WindowMatGlazing.optical_data_type
        # Optical properties are written as dimensionless fractions.
        win_glazing.Solar_Transmittance_at_Normal_Incidence = mat_def.solar_transmittance.to(
            ureg.dimensionless).m
        win_glazing.Front_Side_Solar_Reflectance_at_Normal_Incidence = mat_def.front_side_solar_reflectance.to(
            ureg.dimensionless).m
        win_glazing.Back_Side_Solar_Reflectance_at_Normal_Incidence = mat_def.back_side_solar_reflectance.to(
            ureg.dimensionless).m
        win_glazing.Visible_Transmittance_at_Normal_Incidence = mat_def.visible_transmittance.to(
            ureg.dimensionless).m
        win_glazing.Front_Side_Visible_Reflectance_at_Normal_Incidence = mat_def.front_side_visible_reflectance.to(
            ureg.dimensionless).m
        win_glazing.Back_Side_Visible_Reflectance_at_Normal_Incidence = mat_def.back_side_visible_reflectance.to(
            ureg.dimensionless).m
        win_glazing.Infrared_Transmittance_at_Normal_Incidence = mat_def.infrared_transmittance.to(
            ureg.dimensionless).m
        win_glazing.Front_Side_Infrared_Hemispherical_Emissivity = mat_def.front_side_infrared_hemispherical_emissivity.to(
            ureg.dimensionless).m
        win_glazing.Back_Side_Infrared_Hemispherical_Emissivity = mat_def.back_side_infrared_hemispherical_emissivity.to(
            ureg.dimensionless).m
        # Conductivity is written in W/(m·K).
        win_glazing.Conductivity = mat_def.conductivity.to(ureg.W / (ureg.m * ureg.K)).m
        win_glazing.Dirt_Correction_Factor_for_Solar_and_Visible_Transmittance = mat_def.dirt_correction_factor.to(
            ureg.dimensionless).m
def add_material_no_mass(idf: IDF,
                         idf_obj_name: str,
                         mat_def: OpaqueMaterial,
                         thermal_resistance: pint.Quantity,
                         ureg: pint.UnitRegistry) -> None:
    """
    For materials without mass specified (e.g. vapour barriers).

    Adds a Material:NoMass object unless an object of that type and name
    already exists in the IDF.

    :param idf: IDF model to extend (modified in place)
    :param idf_obj_name: name for the new IDF object
    :param mat_def: opaque material definition providing roughness and absorptances
    :param thermal_resistance: thermal resistance of the layer
    :param ureg: pint unit registry used for the unit conversions
    :return: None
    """
    if not idf_writing_helpers.exists_in_idf(
            idf, idf_strings.IDFObjects.material_no_mass, idf_obj_name):
        idf_mat = idf.newidfobject(idf_strings.IDFObjects.material_no_mass)
        idf_mat.Name = idf_obj_name
        idf_mat.Roughness = get_idf_roughness_string_for(mat_def.roughness)
        # Thermal resistance is written in m²·K/W.
        idf_mat.Thermal_Resistance = thermal_resistance.to(
            (ureg.m**2 * ureg.K) / ureg.W).m
        # Absorptances are written as dimensionless fractions.
        idf_mat.Thermal_Absorptance = mat_def.thermal_absorptance.to(
            ureg.dimensionless).m
        idf_mat.Solar_Absorptance = mat_def.solar_absorptance.to(
            ureg.dimensionless).m
        idf_mat.Visible_Absorptance = mat_def.visible_absorptance.to(
            ureg.dimensionless).m
def add_opaque_material(idf: IDF,
                        idf_obj_name: str,
                        mat_def: OpaqueMaterial,
                        thickness: pint.Quantity,
                        ureg: pint.UnitRegistry) -> None:
    """
    Add a Material object for an opaque material with full mass properties,
    unless an object of that type and name already exists in the IDF.

    Density and specific heat must be set on ``mat_def``
    (``is_mass_fully_specified`` is asserted below).

    :param idf: IDF model to extend (modified in place)
    :param idf_obj_name: name for the new IDF object
    :param mat_def: opaque material definition providing thermal/optical properties
    :param thickness: layer thickness
    :param ureg: pint unit registry used for the unit conversions
    :return: None
    """
    if not idf_writing_helpers.exists_in_idf(
            idf, idf_strings.IDFObjects.material, idf_obj_name):
        assert mat_def.is_mass_fully_specified(
        ), f"trying to add opaque material {mat_def.name}, but no mass properties specified"
        idf_mat = idf.newidfobject(idf_strings.IDFObjects.material)
        idf_mat.Name = idf_obj_name
        idf_mat.Roughness = get_idf_roughness_string_for(mat_def.roughness)
        # Thickness in meters, conductivity in W/(m·K).
        idf_mat.Thickness = thickness.to(ureg.m).m
        idf_mat.Conductivity = mat_def.conductivity.to(ureg.W / (ureg.m * ureg.K)).m
        # checking for mass properties at begin, thus ignore type warning due to Optional[] declaration
        idf_mat.Density = mat_def.density.to(ureg.kg / ureg.m**3).m  # type: ignore
        idf_mat.Specific_Heat = mat_def.specific_heat.to(
            ureg.J / (ureg.kg * ureg.K)).m  # type: ignore
        # Absorptances are written as dimensionless fractions.
        idf_mat.Thermal_Absorptance = mat_def.thermal_absorptance.to(
            ureg.dimensionless).m
        idf_mat.Solar_Absorptance = mat_def.solar_absorptance.to(
            ureg.dimensionless).m
        idf_mat.Visible_Absorptance = mat_def.visible_absorptance.to(
            ureg.dimensionless).m
def aggregate_chunks(existing_chunks: Iterable[int], item_size: int, subdivision: int = 1):
    """
    Aggregate input data sets into the fewest possible Dask chunks.

    :param existing_chunks: sizes (in items) of the incoming data sets
    :param item_size: size in bytes of one data item
    :param subdivision: HDF5 chunk size in items; oversized data sets are
        split only along multiples of this value
    :return: list of aggregated chunk sizes whose sum equals the input sum
    """
    target_size_bytes = int(
        Quantity(config.get("array.chunk-size")).m_as("bytes"))
    # The optimal number of data per Dask chunk.
    target_size = target_size_bytes // item_size
    # Largest chunk size that is still a whole number of HDF5 chunks
    # (loop-invariant, so computed once).
    dask_chunk_size = target_size // subdivision * subdivision

    # Try to aggregate the input data into the fewest possible Dask chunks.
    new_chunks = []
    for chunk in existing_chunks:
        # If this input data set will fit into the current chunk, add it.
        if new_chunks and new_chunks[-1] + chunk <= target_size:
            new_chunks[-1] += chunk
        # If the current chunk is full (or the chunks list is empty), add this
        # data set to the next chunk.
        elif chunk <= target_size:
            new_chunks.append(chunk)
        # If this data set is larger than the max Dask chunk size, split it
        # along the HDF5 data set chunk boundaries and put the pieces in
        # separate Dask chunks.
        else:
            # BUGFIX: divide by the aligned chunk size, not target_size, so the
            # pieces sum back to the original chunk. The previous
            # divmod(chunk, target_size) silently dropped data whenever
            # subdivision did not divide target_size.
            n_whole_chunks, remainder = divmod(chunk, dask_chunk_size)
            new_chunks += [dask_chunk_size] * n_whole_chunks
            # BUGFIX: do not emit a zero-length chunk on exact multiples.
            if remainder:
                new_chunks.append(remainder)
    return new_chunks
def test_construction_discrete(data: pint.Quantity, time, interpolation, shape_exp):
    """Test the construction of the TimeSeries class."""
    # Derive the expected values from the parametrized inputs.
    expected_time = time
    if isinstance(expected_time, pint.Quantity):
        expected_time = pd.TimedeltaIndex(expected_time.m, unit="s")
    expected_interpolation = interpolation
    if len(data.shape) == 0 and interpolation is None:
        expected_interpolation = "step"

    # create instance
    ts = TimeSeries(data=data, time=time, interpolation=interpolation)

    # check
    assert np.all(ts.data == data)
    assert np.all(ts.time == expected_time)
    assert ts.interpolation == expected_interpolation
    assert ts.shape == shape_exp
    assert data.is_compatible_with(ts.units)

    assert np.all(ts.data_array.data == data)
    assert ts.data_array.attrs["interpolation"] == expected_interpolation
    if expected_time is None:
        assert "time" not in ts.data_array
    else:
        assert np.all(ts.data_array.time == expected_time)
def get_bin(self, quantity: str) -> int:
    """Parse a pH value from ``quantity`` and return its bin index,
    wrapping any parse/binning ValueError in a BinningError."""
    try:
        ph = self.string_to_ph(quantity)
        return self.quantity_binner.get_bin_index(
            Quantity(ph, u.dimensionless))
    except ValueError as e:
        raise BinningError(quantity) from e
def test_measurement_2args(self):
    """Unpickling a Measurement binds it to the *current* application
    registry, not the one it was created with."""
    set_application_registry(self.ureg1)
    m1 = Measurement(Quantity(10, "foo"), Quantity(1, "foo"))
    set_application_registry(self.ureg2)
    m2 = Measurement(Quantity(10, "foo"), Quantity(1, "foo"))
    # m1 was created under ureg1 but is unpickled while ureg2 is active.
    m3 = pickle.loads(pickle.dumps(m1))

    assert m1.dimensionality == {"[dim1]": 1}
    assert m2.dimensionality == {"[dim2]": 1}
    assert m3.dimensionality == {"[dim2]": 1}

    for m, value, error in ((m1, 20, 2), (m2, 30, 3), (m3, 30, 3)):
        converted = m.to("bar")
        self.assertEqual(converted.value.magnitude, value)
        self.assertEqual(converted.error.magnitude, error)
def test_datax(self):
    """ Test that it writes a number of variables correctly """
    # Write one variable per supported value form: bare string, bare number,
    # (value, unit), (value, unit, format), (value, format), and Quantity.
    # NOTE(review): the keyword ``f=`` is a datax key and does not clash with
    # the local stream ``f`` because it is passed by keyword.
    f = io.StringIO()
    printvariables(f,
                   a = "Literal string",
                   b = 3.141592,
                   c = (3.141592,"\\meter"),
                   d = (3.141592,"\\meter","%.2g"),
                   e = (3.141592,"%.2g"),
                   f = Quantity(3.141592,"\\meter"),
                   )
    f.seek(0)
    written = f.read()
    # Expected output, byte-for-byte (one \pgfkeyssetvalue line per variable).
    target ="""\
% File auto-generated by LaTeXDatax.py. Will be overwritten.
\\pgfkeyssetvalue{/datax/a}{Literal string}
\\pgfkeyssetvalue{/datax/b}{\\num{3.142}}
\\pgfkeyssetvalue{/datax/c}{\\qty{3.142}{\\meter}}
\\pgfkeyssetvalue{/datax/d}{\\qty{3.1}{\\meter}}
\\pgfkeyssetvalue{/datax/e}{\\num{3.1}}
\\pgfkeyssetvalue{/datax/f}{\\SI[]{3.141592}{\\meter}}
"""
    self.assertEqual(written,target)
def extract_temperature(self, temperature: str) -> RxnQuantity:
    """Parse ``temperature`` into a Celsius RxnQuantity, wrapping any
    VUEParseError in a TemperatureExtractionError."""
    try:
        vue = get_vue(temperature)
        celsius = temperature_to_float(temperature, vue)
        return RxnQuantity(Quantity(celsius, u.degC))
    except VUEParseError as e:
        raise TemperatureExtractionError(temperature) from e
def eval_albedo_mono(self, w: pint.Quantity) -> pint.Quantity:
    """Interpolate the dataset's albedo at wavelengths ``w`` and replicate
    the result over all layers."""
    with xr.open_dataset(self.dataset) as ds:
        # Express the requested wavelengths in the dataset's own units.
        wavelengths = w.m_as(ds.w.attrs["units"])
        albedo = to_quantity(ds.albedo.interp(w=wavelengths))
        return albedo * np.ones(self.n_layers)
def add_airgap(idf: IDF, idf_obj_name: str, thermal_resistance: pint.Quantity, ureg: pint.UnitRegistry) -> None:
    """Add a Material:AirGap object with the given thermal resistance,
    unless an object of that type and name already exists in the IDF."""
    obj_type = idf_strings.IDFObjects.material_air_gap
    if idf_writing_helpers.exists_in_idf(idf, obj_type, idf_obj_name):
        return
    air_gap = idf.newidfobject(obj_type)
    air_gap.Name = idf_obj_name
    # Thermal resistance is written in m²·K/W.
    air_gap.Thermal_Resistance = thermal_resistance.to(ureg.m**2 * ureg.K / ureg.W).m
def digitize(self, quantity: Quantity) -> int:
    """
    Does the same as numpy.digitize, which is not compatible with pint.Quantity
    """
    # Reject quantities whose dimensionality does not match the binner's.
    if quantity.dimensionality != self.dimensionality:
        raise BinningError('Incompatible dimensionality')
    # Compare magnitudes in base units against the unitless boundaries.
    base_magnitude = quantity.to_base_units().magnitude
    return int(np.digitize(base_magnitude, self.unitless_bin_boundaries))