def init_attributes(self, *,
                    spatial_discretisation,
                    spectral_discretisation,
                    kappa,
                    enable_temperatures=False,
                    rtol=default_rtol):  # TODO move to one method
    super().sync()
    self.notify()

    attributes = {}
    with np.errstate(all='raise'):
        positions = spatial_discretisation.sample(self.mesh.grid, self.core.n_sd)
        attributes['cell id'], attributes['cell origin'], attributes['position in cell'] = \
            self.mesh.cellular_attributes(positions)

        r_dry, n_per_kg = spectral_discretisation.sample(self.core.n_sd)
        r_wet = r_wet_init(r_dry, self, attributes['cell id'], kappa, rtol)

        rhod = self['rhod'].to_ndarray()
        cell_id = attributes['cell id']
        domain_volume = np.prod(np.array(self.mesh.size))

    if enable_temperatures:
        attributes['temperature'] = temperature_init(self, attributes['cell id'])
    attributes['n'] = discretise_n(n_per_kg * rhod[cell_id] * domain_volume)
    attributes['volume'] = phys.volume(radius=r_wet)
    attributes['dry volume'] = phys.volume(radius=r_dry)

    return attributes

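# Note on the discretise_n helper used above and in the snippets below: super-droplet
# multiplicities must be integers, so the sampled real-valued multiplicities get rounded.
# A minimal standalone sketch of that idea (an interpretation for illustration only,
# not the actual PySDM implementation):
import numpy as np

def discretise_n_sketch(n_float):
    n_int = np.round(n_float).astype(np.int64)
    # rounding a multiplicity down to zero would silently drop a super-droplet
    assert (n_int > 0).all()
    return n_int
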
def test_moment_0d():
    # Arrange
    n_part = 10000
    v_mean = 2e-6
    d = 1.2
    v_min = 0.01e-6
    v_max = 10e-6
    n_sd = 32

    spectrum = Lognormal(n_part, v_mean, d)
    v, n = linear(n_sd, spectrum, (v_min, v_max))
    T = np.full_like(v, 300.)
    n = discretise_n(n)
    particles = DummyCore(backend, n_sd)
    attribute = {'n': n, 'volume': v, 'temperature': T}
    particles.build(attribute)
    state = particles.state

    true_mean, true_var = spectrum.stats(moments='mv')

    # TODO: add a moments_0 wrapper
    moment_0 = particles.backend.Storage.empty((1,), dtype=int)
    moments = particles.backend.Storage.empty((1, 1), dtype=float)

    # Act
    state.moments(moment_0, moments, specs={'volume': (0,)})
    discr_zero = moments[0, 0]
    state.moments(moment_0, moments, specs={'volume': (1,)})
    discr_mean = moments[0, 0]
    state.moments(moment_0, moments, specs={'volume': (2,)})
    discr_mean_radius_squared = moments[0, 0]
    state.moments(moment_0, moments, specs={'temperature': (0,)})
    discr_zero_T = moments[0, 0]
    state.moments(moment_0, moments, specs={'temperature': (1,)})
    discr_mean_T = moments[0, 0]
    state.moments(moment_0, moments, specs={'temperature': (2,)})
    discr_mean_T_squared = moments[0, 0]

    # Assert
    assert abs(discr_zero - 1) / 1 < 1e-3
    assert abs(discr_mean - true_mean) / true_mean < .01e-1
    true_mrsq = true_var + true_mean**2
    assert abs(discr_mean_radius_squared - true_mrsq) / true_mrsq < .05e-1
    assert discr_zero_T == discr_zero
    assert discr_mean_T == 300.
    assert discr_mean_T_squared == 300. ** 2

def test_moment_0d(backend):
    # Arrange
    n_part = 100000
    v_mean = 2e-6
    d = 1.2
    n_sd = 32

    spectrum = Lognormal(n_part, v_mean, d)
    v, n = Linear(spectrum).sample(n_sd)
    T = np.full_like(v, 300.)
    n = discretise_n(n)
    particles = DummyCore(backend, n_sd)
    attribute = {'n': n, 'volume': v, 'temperature': T, 'heat': T * v}
    particles.build(attribute)
    state = particles.particles

    true_mean, true_var = spectrum.stats(moments='mv')

    # TODO #217: add a moments_0 wrapper
    moment_0 = particles.backend.Storage.empty((1, ), dtype=float)
    moments = particles.backend.Storage.empty((1, 1), dtype=float)

    # Act
    state.moments(moment_0, moments, specs={'volume': (0, )})
    discr_zero = moments[0, slice(0, 1)].to_ndarray()
    state.moments(moment_0, moments, specs={'volume': (1, )})
    discr_mean = moments[0, slice(0, 1)].to_ndarray()
    state.moments(moment_0, moments, specs={'volume': (2, )})
    discr_mean_radius_squared = moments[0, slice(0, 1)].to_ndarray()
    state.moments(moment_0, moments, specs={'temperature': (0, )})
    discr_zero_T = moments[0, slice(0, 1)].to_ndarray()
    state.moments(moment_0, moments, specs={'temperature': (1, )})
    discr_mean_T = moments[0, slice(0, 1)].to_ndarray()
    state.moments(moment_0, moments, specs={'temperature': (2, )})
    discr_mean_T_squared = moments[0, slice(0, 1)].to_ndarray()

    # Assert
    assert abs(discr_zero - 1) / 1 < 1e-3
    assert abs(discr_mean - true_mean) / true_mean < .01e-1
    true_mrsq = true_var + true_mean**2
    assert abs(discr_mean_radius_squared - true_mrsq) / true_mrsq < .05e-1
    assert discr_zero_T == discr_zero
    assert discr_mean_T == 300.
    np.testing.assert_approx_equal(discr_mean_T_squared, 300.**2, significant=6)

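# For context: the reference values in the assertions above come from the analytic
# moments of the lognormal spectrum. A standalone check with scipy, assuming that
# Lognormal(n_part, v_mean, d) denotes a lognormal with median v_mean and geometric
# standard deviation d (an assumption about the spectrum class, not taken from the code):
import numpy as np
from scipy import stats

v_mean, d = 2e-6, 1.2
dist = stats.lognorm(s=np.log(d), scale=v_mean)
true_mean, true_var = dist.stats(moments='mv')
true_mrsq = true_var + true_mean ** 2  # second raw moment checked via specs={'volume': (2,)}
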
def init_attributes(self, *,
                    n_in_dv: [float, np.ndarray],
                    kappa: float,
                    r_dry: [float, np.ndarray],
                    rtol=default_rtol):
    if not isinstance(n_in_dv, np.ndarray):
        r_dry = np.array([r_dry])
        n_in_dv = np.array([n_in_dv])

    attributes = {}
    attributes['dry volume'] = self.formulae.trivia.volume(radius=r_dry)
    attributes['n'] = discretise_n(n_in_dv)
    r_wet = r_wet_init(r_dry, self, kappa, rtol=rtol)
    attributes['volume'] = self.formulae.trivia.volume(radius=r_wet)

    return attributes

def test_spectrum_moment_0d(backend):
    # Arrange
    n_part = 100000
    v_mean = 2e-6
    d = 1.2
    n_sd = 32

    spectrum = Lognormal(n_part, v_mean, d)
    v, n = Linear(spectrum).sample(n_sd)
    T = np.full_like(v, 300.)
    n = discretise_n(n)
    particles = DummyCore(backend, n_sd)
    attribute = {'n': n, 'volume': v, 'temperature': T, 'heat': T * v}
    particles.build(attribute)
    state = particles.particles
    v_bins = np.linspace(0, 5e-6, num=5, endpoint=True)

    true_mean, true_var = spectrum.stats(moments='mv')

    # TODO #217: add a moments_0 wrapper
    spectrum_moment_0 = particles.backend.Storage.empty((len(v_bins) - 1, 1), dtype=float)
    spectrum_moments = particles.backend.Storage.empty((len(v_bins) - 1, 1), dtype=float)
    moment_0 = particles.backend.Storage.empty((1, ), dtype=float)
    moments = particles.backend.Storage.empty((1, 1), dtype=float)
    v_bins_edges = particles.backend.Storage.from_ndarray(v_bins)

    # Act
    state.spectrum_moments(spectrum_moment_0, spectrum_moments,
                           attr='volume', rank=1, attr_bins=v_bins_edges)
    actual = spectrum_moments.to_ndarray()

    expected = np.empty((len(v_bins) - 1, 1), dtype=float)
    for i in range(len(v_bins) - 1):
        state.moments(moment_0, moments, specs={'volume': (1, )},
                      attr_range=(v_bins[i], v_bins[i + 1]))
        expected[i, 0] = moments[0, 0]

    # Assert
    np.testing.assert_array_almost_equal(actual, expected)

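# A plain-numpy illustration of the quantity compared in the test above: the
# multiplicity-weighted mean volume within each bin (an interpretation of what the
# rank-1 spectrum_moments / moments calls yield, not PySDM code):
import numpy as np

def binned_mean_volume(v, n, bin_edges):
    out = np.zeros(len(bin_edges) - 1)
    for i, (lo, hi) in enumerate(zip(bin_edges[:-1], bin_edges[1:])):
        mask = (v >= lo) & (v < hi)
        if n[mask].sum() > 0:
            out[i] = np.average(v[mask], weights=n[mask])
    return out
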
def build(self, attributes: dict, products: list = ()):
    for dynamic in self.core.dynamics.values():
        dynamic.register(self)
    for product in products:
        self.register_product(product)
    for attribute in attributes:
        self.request_attribute(attribute)

    if "<class 'PySDM.dynamics.condensation.condensation.Condensation'>" in self.core.dynamics:  # TODO: mapper?
        self.core.condensation_solver = \
            self.core.backend.make_condensation_solver(
                **self.condensation_params,
                enable_drop_temperatures='temperatures' in self.req_attr)

    attributes['n'] = discretise_n(attributes['n'])
    if self.core.mesh.dimension == 0:
        attributes['cell id'] = np.zeros_like(attributes['n'], dtype=np.int64)  # TODO
    self.core.state = StateFactory.attributes(self.core, self.req_attr, attributes)

    return self.core

def init_attributes(self, *,
                    spatial_discretisation,
                    kappa,
                    spectral_discretisation=None,
                    spectro_glacial_discretisation=None,
                    rtol=default_rtol):
    super().sync()
    self.notify()

    assert spectro_glacial_discretisation is None or spectral_discretisation is None

    attributes = {}
    with np.errstate(all='raise'):
        positions = spatial_discretisation.sample(self.mesh.grid, self.particulator.n_sd)
        attributes['cell id'], attributes['cell origin'], attributes['position in cell'] = \
            self.mesh.cellular_attributes(positions)

        if spectral_discretisation:
            r_dry, n_per_kg = spectral_discretisation.sample(self.particulator.n_sd)
        elif spectro_glacial_discretisation:
            r_dry, T_fz, n_per_kg = spectro_glacial_discretisation.sample(self.particulator.n_sd)
            attributes['freezing temperature'] = T_fz
        else:
            raise NotImplementedError()

        attributes['dry volume'] = self.formulae.trivia.volume(radius=r_dry)
        attributes['kappa times dry volume'] = kappa * attributes['dry volume']
        if kappa == 0:
            r_wet = r_dry
        else:
            r_wet = r_wet_init(r_dry, self,
                               kappa_times_dry_volume=attributes['kappa times dry volume'],
                               rtol=rtol,
                               cell_id=attributes['cell id'])

        rhod = self['rhod'].to_ndarray()
        cell_id = attributes['cell id']
        domain_volume = np.prod(np.array(self.mesh.size))

    attributes['n'] = discretise_n(n_per_kg * rhod[cell_id] * domain_volume)
    attributes['volume'] = self.formulae.trivia.volume(radius=r_wet)

    return attributes

def __init__(self, n_sd=100, dt_output=1 * si.second, dt_max=1 * si.second):
    self.n_steps = int(self.total_time / (5 * si.second))  # TODO: rename to n_output
    self.n_sd = n_sd
    self.r_dry, self.n = spectral_sampling.logarithmic(
        n_sd=n_sd,
        spectrum=Lognormal(
            norm_factor=1000 / si.milligram * self.mass_of_dry_air,
            m_mode=50 * si.nanometre,
            s_geom=1.4
        ),
        range=(10.633 * si.nanometre, 513.06 * si.nanometre)
    )
    self.n = discretise_n(self.n)
    self.dt_max = dt_max
    self.dt_output = dt_output
    self.r_bins_edges = np.linspace(0 * si.micrometre, 20 * si.micrometre, 101, endpoint=True)

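# A rough standalone sketch of what the logarithmic spectral sampling above produces:
# dry radii spread over log-spaced bins of the given range, each super-droplet carrying
# the spectrum's number integrated over its bin (an interpretation for illustration,
# not the spectral_sampling.logarithmic implementation; values mirror the settings
# above except norm_factor, which is set to 1 here):
import numpy as np
from scipy import stats

n_sd = 100
m_mode, s_geom = 50e-9, 1.4
r_range = (10.633e-9, 513.06e-9)

edges = np.logspace(np.log10(r_range[0]), np.log10(r_range[1]), n_sd + 1)
r_dry = np.sqrt(edges[:-1] * edges[1:])          # geometric bin mid-points
cdf = stats.lognorm(s=np.log(s_geom), scale=m_mode).cdf
n = 1.0 * np.diff(cdf(edges))                    # number per super-droplet (norm_factor = 1)
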