def test_coalescence_call(n_sd, backend_class, adaptive):
    """A coalescence step must not move droplets between grid cells."""
    # TODO #330
    if backend_class is ThrustRTC:
        return

    # Arrange
    multiplicity = np.ones(n_sd)
    volume = np.ones_like(multiplicity)
    box = Box(dv=1, dt=DEFAULTS.dt_coal_range[1])
    grid = (25, 25)
    box.mesh = Mesh(grid, size=grid)
    particulator, sut = get_dummy_particulator_and_coalescence(
        backend_class, n_sd, environment=box
    )
    cell_id, _, _ = box.mesh.cellular_attributes(Pseudorandom.sample(grid, n_sd))
    particulator.build({'n': multiplicity, 'volume': volume, 'cell id': cell_id})
    sut.actual_length = particulator.attributes._ParticleAttributes__idx.length
    sut.adaptive = adaptive

    # Act
    sut()

    # Assert: cell ids unchanged by the coalescence step
    np.testing.assert_array_equal(
        cell_id, particulator.attributes['cell id'].to_ndarray(raw=True)
    )
def test_coalescence_call(n_sd, backend, adaptive):
    """A coalescence step must not move droplets between grid cells."""
    # TODO #330
    from PySDM.backends import ThrustRTC
    if backend is ThrustRTC:
        return
    # Arrange
    n = np.ones(n_sd)
    v = np.ones_like(n)
    env = Box(dv=1, dt=default_dt_coal_range[1])
    grid = (25, 25)
    env.mesh = Mesh(grid, size=grid)
    core, sut = get_dummy_core_and_sdm(backend, len(n), environment=env)
    cell_id, _, _ = env.mesh.cellular_attributes(
        Pseudorandom.sample(grid, len(n)))
    attributes = {'n': n, 'volume': v, 'cell id': cell_id}
    core.build(attributes)
    # fix: the returned arrays were bound to an unused local (u01, _);
    # the call is kept in case it has an RNG-state side effect -- TODO confirm
    sut.rnd_opt.get_random_arrays()
    sut.actual_length = core.particles._Particles__idx.length
    sut.adaptive = adaptive
    # Act
    sut()
    # Assert: cell ids unchanged by the coalescence step
    np.testing.assert_array_equal(
        cell_id, core.particles['cell id'].to_ndarray(raw=True))
def test_multiplicity_overflow(backend=None):
    """Drive collision_coalescence_breakup with a huge gamma to exercise
    multiplicity-overflow handling (test passes if the call completes)."""
    # fix: the original default `backend=CPU()` constructed a backend at import
    # time and shared it across calls; construct lazily instead
    if backend is None:
        backend = CPU()
    # Arrange
    params = {
        "gamma": [100.0],
        "n_init": [1, 1],
        "v_init": [1, 1],
        "is_first_in_pair": [True, False],
        "n_fragment": [4],
    }
    n_init = params["n_init"]
    n_sd = len(n_init)
    builder = Builder(n_sd, backend)
    # fix: np.NaN alias was removed in NumPy 2.0 -- use np.nan (as elsewhere in the file)
    builder.set_environment(Box(dv=np.nan, dt=np.nan))
    particulator = builder.build(
        attributes={
            "n": np.asarray(n_init),
            "volume": np.asarray(params["v_init"])
        },
        products=())
    n_pairs = n_sd // 2
    pairwise_zeros = particulator.PairwiseStorage.from_ndarray(np.array([0.0] * n_pairs))
    general_zeros = particulator.Storage.from_ndarray(np.array([0.0] * n_sd))
    gamma = particulator.PairwiseStorage.from_ndarray(np.array(params["gamma"]))
    rand = particulator.PairwiseStorage.from_ndarray(np.array([1.0] * n_pairs))
    Eb = particulator.PairwiseStorage.from_ndarray(np.array([1.0] * n_pairs))
    breakup_rate = particulator.Storage.from_ndarray(np.array([0.0]))
    n_fragment = particulator.PairwiseStorage.from_ndarray(np.array(params["n_fragment"]))
    is_first_in_pair = particulator.PairIndicator(n_sd)
    is_first_in_pair.indicator[:] = particulator.Storage.from_ndarray(
        np.asarray(params["is_first_in_pair"]))
    # Act
    particulator.collision_coalescence_breakup(
        enable_breakup=True,
        gamma=gamma,
        rand=rand,
        Ec=pairwise_zeros,
        Eb=Eb,
        n_fragment=n_fragment,
        coalescence_rate=general_zeros,
        breakup_rate=breakup_rate,
        is_first_in_pair=is_first_in_pair
    )
def test_coalescence_2_sd(backend_class):
    """Two super-droplets under a Golovin kernel should merge into a single one."""
    # Arrange
    settings = Settings()
    settings.kernel = Golovin(b=1.5e12)
    settings.formulae.seed = 0
    settings.n_sd = 2
    output_steps = (0, 200)
    builder = Builder(
        n_sd=settings.n_sd, backend=backend_class(formulae=settings.formulae))
    builder.set_environment(Box(dt=settings.dt, dv=settings.dv))
    volume, multiplicity = ConstantMultiplicity(settings.spectrum).sample(settings.n_sd)
    attributes = {'volume': volume, 'n': multiplicity}
    builder.add_dynamic(Coalescence(settings.kernel, adaptive=False))
    particulator = builder.build(attributes)

    # Act
    volumes = {}
    for step in output_steps:
        particulator.run(step - particulator.n_steps)
        volumes[particulator.n_steps] = particulator.attributes['volume'].to_ndarray()

    # Assert: max droplet volume grows monotonically, and only one SD remains
    largest_so_far = 0
    for vol in volumes.values():
        assert largest_so_far < np.amax(vol)
        largest_so_far = np.amax(vol)
    assert particulator.attributes.super_droplet_count == 1
def test_coalescence_2_sd(backend):
    """Two super-droplets under a Golovin kernel should merge into a single one."""
    # Arrange
    settings = Settings()
    settings.kernel = Golovin(b=1.5e12)
    settings.formulae.seed = 0
    settings.n_sd = 2
    output_steps = (0, 200)
    builder = Builder(n_sd=settings.n_sd, backend=backend, formulae=settings.formulae)
    builder.set_environment(Box(dt=settings.dt, dv=settings.dv))
    volume, multiplicity = ConstantMultiplicity(settings.spectrum).sample(settings.n_sd)
    builder.add_dynamic(Coalescence(settings.kernel))
    core = builder.build({'volume': volume, 'n': multiplicity})

    # Act
    volumes = {}
    for step in output_steps:
        core.run(step - core.n_steps)
        volumes[core.n_steps] = core.particles['volume'].to_ndarray()

    # Assert: max droplet volume grows monotonically, and only one SD remains
    largest_so_far = 0
    for vol in volumes.values():
        assert largest_so_far < np.amax(vol)
        largest_so_far = np.amax(vol)
    print(core.particles['n'].to_ndarray())
    assert core.particles.SD_num == 1
def test_critical_supersaturation():
    """Activable fraction at 1% supersaturation lies strictly between 0 and 1."""
    # arrange
    temperature = 300 * si.K
    n_sd = 100
    S_max = .01
    dry_volumes = np.linspace(.001, 1, n_sd) * si.um**3
    builder = Builder(n_sd=n_sd, backend=CPU())
    env = Box(dt=np.nan, dv=np.nan)
    builder.set_environment(env)
    env['T'] = temperature
    particulator = builder.build(
        attributes={
            'n': np.ones(n_sd),
            'volume': np.linspace(.01, 10, n_sd) * si.um**3,
            'dry volume': dry_volumes,
            'kappa times dry volume': .9 * dry_volumes,
            'dry volume organic': np.zeros(n_sd)
        },
        products=[ActivableFraction()]
    )
    # act
    activable_fraction = particulator.products['activable fraction'].get(S_max)
    # assert
    assert 0 < activable_fraction < 1
def make_core(settings, coal_eff):
    """Build a Box-environment particulator with a Collision dynamic and
    zeroth/first/second volume-moment products (M0, M1, M2)."""
    builder = Builder(n_sd=settings.n_sd, backend=CPU(settings.formulae))
    env = Box(dv=settings.dv, dt=settings.dt)
    builder.set_environment(env)
    env["rhod"] = 1.0
    volume, multiplicity = ConstantMultiplicity(settings.spectrum).sample(settings.n_sd)
    attributes = {"volume": volume, "n": multiplicity}
    builder.add_dynamic(Collision(
        collision_kernel=settings.kernel,
        coalescence_efficiency=coal_eff,
        breakup_efficiency=settings.break_eff,
        fragmentation_function=settings.fragmentation,
        adaptive=settings.adaptive,
    ))
    # volume moments of rank 0..2, named M0..M2
    products = tuple(
        am.make_arbitrary_moment_product(
            rank=rank, attr="volume", attr_unit="m^3")(name=f"M{rank}")
        for rank in (0, 1, 2)
    )
    return builder.build(attributes, products)
def run(settings, backend=CPU, observers=()):
    """Run a box coalescence simulation.

    Returns a dict mapping each step in settings.steps to the mass-density
    spectrum (dv/dlnr, scaled by settings.rho) and the measured wall time.
    """
    builder = Builder(n_sd=settings.n_sd, backend=backend)
    builder.set_environment(Box(dv=settings.dv, dt=settings.dt))
    volume, multiplicity = ConstantMultiplicity(settings.spectrum).sample(settings.n_sd)
    attributes = {'volume': volume, 'n': multiplicity}
    coalescence = Coalescence(settings.kernel)
    coalescence.adaptive = settings.adaptive
    builder.add_dynamic(coalescence)
    products = [ParticlesVolumeSpectrum(), WallTime()]
    core = builder.build(attributes, products)
    # optionally override the terminal-velocity approximation
    if hasattr(settings, 'u_term') and 'terminal velocity' in core.particles.attributes:
        core.particles.attributes['terminal velocity'].approximation = settings.u_term(core)
    for observer in observers:
        core.observers.append(observer)
    vals = {}
    core.products['wall_time'].reset()
    for step in settings.steps:
        core.run(step - core.n_steps)
        vals[step] = core.products['dv/dlnr'].get(settings.radius_bins_edges)
        vals[step][:] *= settings.rho
    exec_time = core.products['wall_time'].get()
    return vals, exec_time
def get_dummy_core_and_sdm(n_length):
    """Create a DummyCore in a unit box plus a stub-kernel Coalescence
    dynamic registered on it; returns (core, sdm)."""
    # NOTE: `backend` is resolved from module scope here
    core = DummyCore(backend, n_sd=n_length)
    core.environment = Box(dv=1, dt=0)
    sdm = Coalescence(StubKernel(core.backend))
    sdm.register(core)
    return core, sdm
def run(setup, observers=()):
    """Run a box coalescence simulation.

    Returns a dict mapping each step in setup.steps to the mass-density
    spectrum (dv/dlnr, scaled by setup.rho) and the run statistics.
    """
    builder = Builder(n_sd=setup.n_sd, backend=setup.backend)
    builder.set_environment(Box(dv=setup.dv, dt=setup.dt))
    volume, multiplicity = constant_multiplicity(
        setup.n_sd, setup.spectrum, (setup.init_x_min, setup.init_x_max))
    attributes = {'volume': volume, 'n': multiplicity}
    coalescence = Coalescence(setup.kernel)
    coalescence.adaptive = setup.adaptive
    builder.add_dynamic(coalescence)
    products = [ParticlesVolumeSpectrum()]
    particles = builder.build(attributes, products)
    # optionally override the terminal-velocity approximation
    if hasattr(setup, 'u_term') and 'terminal velocity' in particles.state.attributes:
        particles.state.attributes['terminal velocity'].approximation = setup.u_term(particles)
    for observer in observers:
        particles.observers.append(observer)
    vals = {}
    for step in setup.steps:
        particles.run(step - particles.n_steps)
        vals[step] = particles.products['dv/dlnr'].get(setup.radius_bins_edges)
        vals[step][:] *= setup.rho
    return vals, particles.stats
def make_core(settings):
    """Build a Box-environment particulator with a Collision dynamic and
    spectrum / collision-rate / collision-rate-deficit products."""
    builder = Builder(n_sd=settings.n_sd, backend=CPU(settings.formulae))
    env = Box(dv=settings.dv, dt=settings.dt)
    builder.set_environment(env)
    env["rhod"] = 1.0
    volume, multiplicity = ConstantMultiplicity(settings.spectrum).sample(settings.n_sd)
    attributes = {"volume": volume, "n": multiplicity}
    builder.add_dynamic(Collision(
        collision_kernel=settings.kernel,
        coalescence_efficiency=settings.coal_eff,
        breakup_efficiency=settings.break_eff,
        fragmentation_function=settings.fragmentation,
        adaptive=settings.adaptive,
    ))
    products = (
        ParticleSizeSpectrumPerVolume(
            radius_bins_edges=settings.radius_bins_edges, name="dv/dlnr"),
        CollisionRatePerGridbox(name="cr"),
        CollisionRateDeficitPerGridbox(name="crd"),
    )
    return builder.build(attributes, products)
def run_box_NObreakup(settings, step):
    """Run a coalescence-only box simulation up to `step`.

    Returns ((radius bin left edges in micrometres,), dv/dlnr spectrum).
    """
    builder = Builder(n_sd=settings.n_sd, backend=CPU(settings.formulae))
    env = Box(dv=settings.dv, dt=settings.dt)
    builder.set_environment(env)
    env["rhod"] = 1.0
    volume, multiplicity = ConstantMultiplicity(settings.spectrum).sample(settings.n_sd)
    attributes = {"volume": volume, "n": multiplicity}
    builder.add_dynamic(Coalescence(
        collision_kernel=settings.kernel,
        coalescence_efficiency=settings.coal_eff,
        adaptive=settings.adaptive,
    ))
    products = (
        ParticleVolumeVersusRadiusLogarithmSpectrum(
            radius_bins_edges=settings.radius_bins_edges, name="dv/dlnr"),
        CollisionRatePerGridbox(name="cr"),
        CollisionRateDeficitPerGridbox(name="crd"),
    )
    core = builder.build(attributes, products)
    # run
    core.run(step - core.n_steps)
    x = (settings.radius_bins_edges[:-1] / si.micrometres,)
    y = core.products["dv/dlnr"].get()
    return (x, y)
def test_single_collision_bounce(params, backend_class=CPU):
    """With zero coalescence and breakup efficiencies a collision must bounce,
    leaving the multiplicities unchanged."""
    # Arrange
    n_sd = 2
    builder = Builder(n_sd, backend_class())
    # fix: np.NaN alias was removed in NumPy 2.0 -- use np.nan (as elsewhere in the file)
    builder.set_environment(Box(dv=np.nan, dt=np.nan))
    n_init = [1, 1]
    particulator = builder.build(
        attributes={
            "n": np.asarray(n_init),
            "volume": np.asarray([100 * si.um**3, 100 * si.um**3])
        },
        products=())
    pairwise_zeros = particulator.PairwiseStorage.from_ndarray(np.array([0.0]))
    general_zeros = particulator.Storage.from_ndarray(np.array([0.0]))
    gamma = particulator.PairwiseStorage.from_ndarray(np.array([params["gamma"]]))
    rand = particulator.PairwiseStorage.from_ndarray(np.array([params["rand"]]))
    n_fragment = particulator.PairwiseStorage.from_ndarray(np.array([4]))
    is_first_in_pair = make_PairIndicator(backend_class)(n_sd)
    # Act: Ec == Eb == 0 so neither coalescence nor breakup may happen
    particulator.collision_coalescence_breakup(
        enable_breakup=True,
        gamma=gamma,
        rand=rand,
        Ec=pairwise_zeros,
        Eb=pairwise_zeros,
        n_fragment=n_fragment,
        coalescence_rate=general_zeros,
        breakup_rate=general_zeros,
        is_first_in_pair=is_first_in_pair
    )
    # Assert
    assert (particulator.attributes['n'].to_ndarray() == n_init).all()
def test_coalescence(backend, kernel, croupier, adaptive):
    """Box coalescence: the maximum droplet volume must grow between outputs."""
    if backend == ThrustRTC and croupier == 'local':  # TODO #358
        return
    if backend == ThrustRTC and adaptive and croupier == 'global':  # TODO #329
        return
    # Arrange
    settings = Settings()
    settings.formulae.seed = 0
    output_steps = (0, 800)
    builder = Builder(n_sd=settings.n_sd, backend=backend, formulae=settings.formulae)
    builder.set_environment(Box(dt=settings.dt, dv=settings.dv))
    volume, multiplicity = ConstantMultiplicity(settings.spectrum).sample(settings.n_sd)
    builder.add_dynamic(Coalescence(kernel, croupier=croupier, adaptive=adaptive))
    core = builder.build({'volume': volume, 'n': multiplicity})

    # Act
    volumes = {}
    for step in output_steps:
        core.run(step - core.n_steps)
        volumes[core.n_steps] = core.particles['volume'].to_ndarray()

    # Assert: monotonic growth of the largest droplet volume
    largest_so_far = 0
    for vol in volumes.values():
        assert largest_so_far < np.amax(vol)
        largest_so_far = np.amax(vol)
def run(settings, backend=CPU, observers=()):
    """Run a box coalescence simulation.

    Returns a dict mapping each output step to the dv/dlnr spectrum
    (scaled by settings.rho) and the measured wall time.
    """
    builder = Builder(n_sd=settings.n_sd, backend=backend(formulae=settings.formulae))
    builder.set_environment(Box(dv=settings.dv, dt=settings.dt))
    volume, multiplicity = ConstantMultiplicity(settings.spectrum).sample(settings.n_sd)
    attributes = {"volume": volume, "n": multiplicity}
    builder.add_dynamic(
        Coalescence(collision_kernel=settings.kernel, adaptive=settings.adaptive))
    products = (
        ParticleVolumeVersusRadiusLogarithmSpectrum(
            settings.radius_bins_edges, name="dv/dlnr"),
        WallTime(),
    )
    particulator = builder.build(attributes, products)
    # optionally override the terminal-velocity approximation
    if hasattr(settings, "u_term") and "terminal velocity" in particulator.attributes:
        particulator.attributes["terminal velocity"].approximation = settings.u_term(particulator)
    for observer in observers:
        particulator.observers.append(observer)
    vals = {}
    particulator.products["wall time"].reset()
    for step in settings.output_steps:
        particulator.run(step - particulator.n_steps)
        vals[step] = particulator.products["dv/dlnr"].get()[0]
        vals[step][:] *= settings.rho
    return vals, particulator.products["wall time"].get()
def test_freeze_time_dependent(plot=False):
    """Time-dependent (constant-J_het) immersion freezing: record the unfrozen
    fraction over time for several (dt, N) combinations."""
    # Arrange
    cases = (
        {'dt': 5e5, 'N': 1},
        {'dt': 1e6, 'N': 1},
        {'dt': 5e5, 'N': 8},
        {'dt': 1e6, 'N': 8},
        {'dt': 5e5, 'N': 32},
        {'dt': 1e6, 'N': 32},
    )
    rate = 1e-9
    immersed_surface_area = 1
    constant.J_het = rate / immersed_surface_area
    number_of_real_droplets = 1024
    total_time = 2e9  # effectively interpreted here as seconds, i.e. cycle = 1 * si.s
    # dummy (but must-be-set) values
    vol = 44  # just to enable sign flipping (ice water uses negative volumes), actual value does not matter
    dv = 666  # products use concentration, just dividing there and multiplying back here, actual value does not matter

    # envelope curves bounding the expected exponential decay of the unfrozen fraction
    # NOTE(review): hgh/low (and `plot`) are unused in the visible span --
    # presumably used for plotting in the full source; confirm
    hgh = lambda t: np.exp(-0.8 * rate * (t - total_time / 10))
    low = lambda t: np.exp(-1.2 * rate * (t + total_time / 10))

    # Act
    output = {}
    for case in cases:
        n_sd = int(number_of_real_droplets // case['N'])
        # guard against non-divisible (dt, N) combinations
        assert n_sd == number_of_real_droplets / case['N']
        assert total_time // case['dt'] == total_time / case['dt']

        key = f"{case['dt']}:{case['N']}"
        output[key] = {'unfrozen_fraction': [], 'dt': case['dt'], 'N': case['N']}

        formulae = Formulae(heterogeneous_ice_nucleation_rate='Constant')
        builder = Builder(n_sd=n_sd, backend=CPU(formulae=formulae))
        env = Box(dt=case['dt'], dv=dv)
        builder.set_environment(env)
        builder.add_dynamic(Freezing(singular=False))
        attributes = {
            'n': np.full(n_sd, int(case['N'])),
            'immersed surface area': np.full(n_sd, immersed_surface_area),
            'volume': np.full(n_sd, vol)
        }
        products = (IceWaterContent(specific=False),)
        particulator = builder.build(attributes=attributes, products=products)
        env['a_w_ice'] = np.nan  # presumably unused by the constant-rate parameterisation -- confirm

        cell_id = 0
        for i in range(int(total_time / case['dt']) + 1):
            particulator.run(0 if i == 0 else 1)  # i == 0 records the initial state
            # convert ice mass concentration back to an unfrozen droplet fraction
            ice_mass_per_volume = particulator.products['qi'].get()[cell_id]
            ice_mass = ice_mass_per_volume * dv
            ice_number = ice_mass / (const.rho_w * vol)
            unfrozen_fraction = 1 - ice_number / number_of_real_droplets
            output[key]['unfrozen_fraction'].append(unfrozen_fraction)
def _make_particulator():
    """Build a minimal Box particulator at temperature T with a CoolingRate product."""
    # n_sd, dt and T are resolved from the enclosing scope
    builder = Builder(n_sd=n_sd, backend=CPU())
    env = Box(dt=dt, dv=np.nan)
    builder.set_environment(env)
    env['T'] = T
    return builder.build(
        attributes={
            'n': np.ones(n_sd),
            'volume': np.linspace(.01, 10, n_sd) * si.um**3
        },
        products=(CoolingRate(),)
    )
def make_particulator(
    *,
    constants,
    n_sd,
    dt,
    initial_temperature,
    singular,
    seed,
    shima_T_fz,
    ABIFM_spec,
    droplet_volume,
    total_particle_number,
    volume
):
    """Assemble a Box-environment particulator with an immersion-freezing
    dynamic, in either singular (INAS) or time-dependent (ABIFM) mode."""
    formulae_ctor_args = {"seed": seed, "constants": constants}
    if singular:
        formulae_ctor_args["freezing_temperature_spectrum"] = shima_T_fz
    else:
        formulae_ctor_args["heterogeneous_ice_nucleation_rate"] = "ABIFM"
    formulae = Formulae(**formulae_ctor_args)

    attributes = {"volume": np.ones(n_sd) * droplet_volume}
    if singular:
        sampling = SpectroGlacialSampling(
            freezing_temperature_spectrum=formulae.freezing_temperature_spectrum,
            insoluble_surface_spectrum=ABIFM_spec,
            seed=formulae.seed,
        )
        attributes["freezing temperature"], _, attributes["n"] = sampling.sample(n_sd)
    else:
        sampling = ConstantMultiplicity(
            spectrum=ABIFM_spec,
            # seed=formulae.seed
        )
        attributes["immersed surface area"], attributes["n"] = sampling.sample(n_sd)
        # NOTE(review): scaling applied in the non-singular branch only here --
        # confirm against original indentation
        attributes["n"] *= total_particle_number

    builder = Builder(n_sd, CPU(formulae))
    env = Box(dt, volume)
    builder.set_environment(env)
    env["T"] = initial_temperature
    env["RH"] = A_VALUE_LARGER_THAN_ONE
    builder.add_dynamic(Freezing(singular=singular))
    return builder.build(
        attributes=attributes,
        products=(
            PySDM_products.Time(name="t"),
            PySDM_products.AmbientTemperature(name="T"),
            PySDM_products.SpecificIceWaterContent(name="qi"),
        ),
    )
def get_dummy_core_and_sdm(backend, n_length, optimized_random=False,
                           environment=None, substeps=1):
    """Create a DummyCore (default: unit box) plus a stub-kernel Coalescence
    dynamic registered on it; returns (core, sdm)."""
    core = DummyCore(backend, n_sd=n_length)
    core.environment = environment or Box(dv=1, dt=default_dt_coal_range[1])
    sdm = Coalescence(
        StubKernel(core.backend),
        optimized_random=optimized_random,
        substeps=substeps
    )
    sdm.register(core)
    return core, sdm
def get_dummy_particulator_and_coalescence(backend, n_length, optimized_random=False,
                                           environment=None, substeps=1):
    """Create a DummyParticulator (default: unit box) plus a non-adaptive
    stub-kernel Coalescence registered on it; returns (particulator, coalescence)."""
    particulator = DummyParticulator(backend, n_sd=n_length)
    particulator.environment = environment or Box(dv=1, dt=DEFAULTS.dt_coal_range[1])
    coalescence = Coalescence(
        StubKernel(particulator.backend),
        optimized_random=optimized_random,
        substeps=substeps,
        adaptive=False
    )
    coalescence.register(particulator)
    return particulator, coalescence
def run(setup):
    """Run a box coalescence simulation over setup.steps.

    Returns (states, stats); `states` is kept for interface compatibility
    but is never populated here.
    """
    builder = Builder(n_sd=setup.n_sd, backend=setup.backend)
    builder.set_environment(Box(dv=setup.dv, dt=setup.dt))
    volume, multiplicity = constant_multiplicity(
        setup.n_sd, setup.spectrum, (setup.init_x_min, setup.init_x_max))
    builder.add_dynamic(Coalescence(setup.kernel))
    particles = builder.build({'n': multiplicity, 'volume': volume})
    states = {}
    for step in setup.steps:
        particles.run(step - particles.n_steps)
    return states, particles.stats
def run(settings):
    """Run a box coalescence simulation over settings.steps.

    Returns (states, wall time); `states` is kept for interface compatibility
    but is never populated here.
    """
    builder = Builder(n_sd=settings.n_sd, backend=settings.backend)
    builder.set_environment(Box(dv=settings.dv, dt=settings.dt))
    volume, multiplicity = ConstantMultiplicity(settings.spectrum).sample(settings.n_sd)
    builder.add_dynamic(Coalescence(settings.kernel))
    particles = builder.build({'volume': volume, 'n': multiplicity},
                              products=[WallTime()])
    states = {}
    for step in settings.steps:
        particles.run(step - particles.n_steps)
    last_wall_time = particles.products['wall_time'].get()
    return states, last_wall_time
def run(settings, backend):
    """Run a box coalescence simulation over settings.output_steps.

    Returns (states, last wall time); `states` is kept for interface
    compatibility but is never populated here.
    """
    builder = Builder(n_sd=settings.n_sd, backend=backend)
    builder.set_environment(Box(dv=settings.dv, dt=settings.dt))
    volume, multiplicity = ConstantMultiplicity(settings.spectrum).sample(settings.n_sd)
    attributes = {"volume": volume, "n": multiplicity}
    builder.add_dynamic(Coalescence(collision_kernel=settings.kernel))
    particulator = builder.build(attributes, products=(WallTime(),))
    states = {}
    last_wall_time = None
    for step in settings.output_steps:
        particulator.run(step - particulator.n_steps)
        last_wall_time = particulator.products["wall time"].get()
    return states, last_wall_time
def test_noninteger_fragments(params, flag, backend_class=CPU):
    """Breakup with non-integer fragment counts: check multiplicities,
    volumes, or mass conservation depending on `flag` ('n' / 'v' / 'conserve')."""
    # Arrange
    n_init = params["n_init"]
    n_sd = len(n_init)
    builder = Builder(n_sd, backend_class())
    # fix: np.NaN alias was removed in NumPy 2.0 -- use np.nan (as elsewhere in the file)
    builder.set_environment(Box(dv=np.nan, dt=np.nan))
    particulator = builder.build(
        attributes={
            "n": np.asarray(n_init),
            "volume": np.asarray(params["v_init"])
        },
        products=())
    n_pairs = n_sd // 2
    pairwise_zeros = particulator.PairwiseStorage.from_ndarray(np.array([0.0] * n_pairs))
    general_zeros = particulator.Storage.from_ndarray(np.array([0.0] * n_sd))
    gamma = particulator.PairwiseStorage.from_ndarray(np.array(params["gamma"]))
    rand = particulator.PairwiseStorage.from_ndarray(np.array([1.0] * n_pairs))
    Eb = particulator.PairwiseStorage.from_ndarray(np.array([1.0] * n_pairs))
    breakup_rate = particulator.Storage.from_ndarray(np.array([0.0]))
    n_fragment = particulator.PairwiseStorage.from_ndarray(np.array(params["n_fragment"]))
    is_first_in_pair = particulator.PairIndicator(n_sd)
    is_first_in_pair.indicator[:] = particulator.Storage.from_ndarray(
        np.asarray(params["is_first_in_pair"]))
    # Act
    particulator.collision_coalescence_breakup(
        enable_breakup=True,
        gamma=gamma,
        rand=rand,
        Ec=pairwise_zeros,
        Eb=Eb,
        n_fragment=n_fragment,
        coalescence_rate=general_zeros,
        breakup_rate=breakup_rate,
        is_first_in_pair=is_first_in_pair
    )
    # Assert: dispatch on `flag`
    {
        'n': lambda: np.testing.assert_array_equal(
            particulator.attributes['n'].to_ndarray(),
            np.array(params["n_expected"])),
        'v': lambda: np.testing.assert_array_almost_equal(
            particulator.attributes['volume'].to_ndarray(),
            np.array(params["v_expected"]), decimal=6),
        'conserve': lambda: np.testing.assert_almost_equal(
            np.sum(particulator.attributes['n'].to_ndarray()
                   * particulator.attributes['volume'].to_ndarray()),
            np.sum(np.array(params["n_init"]) * np.array(params["v_init"])),
            decimal=6)
    }[flag]()
def test_coalescence(croupier):
    """Golovin-kernel box coalescence: max droplet volume must grow between outputs."""
    # Arrange
    v_min = 4.186e-15
    v_max = 4.186e-12
    n_sd = 2**13
    steps = [0, 30, 60]
    X0 = 4 / 3 * np.pi * 30.531e-6**3  # volume of a 30.531 um radius sphere
    n_part = 2**23 / si.metre**3
    dv = 1e6 * si.metres**3
    dt = 1 * si.seconds
    norm_factor = n_part * dv
    rho = 1000 * si.kilogram / si.metre**3
    kernel = Golovin(b=1.5e3)  # [s-1]
    spectrum = Exponential(norm_factor=norm_factor, scale=X0)
    # NOTE(review): `backend` is resolved from module scope here
    particles_builder = Builder(n_sd=n_sd, backend=backend)
    particles_builder.set_environment(Box(dt=dt, dv=dv))
    attributes = {}
    attributes['volume'], attributes['n'] = constant_multiplicity(
        n_sd, spectrum, (v_min, v_max))
    particles_builder.add_dynamic(Coalescence(kernel, seed=256))
    particles = particles_builder.build(attributes)
    particles.croupier = croupier

    # deterministic stand-in seed source: each call yields 1, 2, 3, ...
    # (state is kept on the class, shared by all instances)
    class Seed:
        seed = 0

        def __call__(self):
            Seed.seed += 1
            return Seed.seed
    particles.dynamics[str(Coalescence)].seed = Seed()
    states = {}

    # Act
    for step in steps:
        particles.run(step - particles.n_steps)
        check(n_part, dv, n_sd, rho, particles.state, step)
        states[particles.n_steps] = copy.deepcopy(particles.state)

    # Assert: monotonic growth of the largest droplet volume
    x_max = 0
    for state in states.values():
        assert x_max < np.amax(state['volume'].to_ndarray())
        x_max = np.amax(state['volume'].to_ndarray())
def test_nonadaptive_same_results_regardless_of_dt(dt, backend_class=CPU):
    """Non-adaptive breakup with an AlwaysN(4) fragmentation function yields
    the same multiplicities (4**10 per droplet) regardless of the time step."""
    # Arrange
    attributes = {
        "n": np.asarray([1, 1]),
        "volume": np.asarray([100 * si.um**3, 100 * si.um**3]),
    }
    breakup = Breakup(ConstantK(1 * si.cm**3 / si.s), AlwaysN(4), adaptive=False)
    n_steps = 10
    builder = Builder(len(attributes["n"]), backend_class())
    builder.set_environment(Box(dv=1 * si.cm**3, dt=dt))
    builder.add_dynamic(breakup)
    particulator = builder.build(attributes=attributes, products=())

    # Act
    particulator.run(n_steps)

    # Assert
    multiplicities = particulator.attributes['n'].to_ndarray()
    assert (multiplicities > 0).all()
    assert (multiplicities != attributes['n']).any()
    assert np.sum(multiplicities) >= np.sum(attributes['n'])
    assert (multiplicities == np.array([1024, 1024])).all()
def test_coalescence(backend, croupier, adaptive):
    """Golovin-kernel box coalescence: max droplet volume must grow between outputs."""
    if backend == ThrustRTC and croupier == 'local':  # TODO #358
        return
    if backend == ThrustRTC and adaptive and croupier == 'global':  # TODO #329
        return
    # Arrange
    formulae = Formulae(seed=256)
    n_sd = 2**14
    output_steps = (0, 100, 200)
    initial_volume = formulae.trivia.volume(radius=30.531e-6)
    n_part = 2**23 / si.metre**3
    dv = 1e6 * si.metres**3
    dt = 1 * si.seconds
    norm_factor = n_part * dv
    rho = 1000 * si.kilogram / si.metre**3
    kernel = Golovin(b=1.5e3)  # [s-1]
    spectrum = Exponential(norm_factor=norm_factor, scale=initial_volume)
    builder = Builder(n_sd=n_sd, backend=backend(formulae=formulae))
    builder.set_environment(Box(dt=dt, dv=dv))
    volume, multiplicity = ConstantMultiplicity(spectrum).sample(n_sd)
    builder.add_dynamic(Coalescence(kernel, croupier=croupier, adaptive=adaptive))
    particulator = builder.build({'volume': volume, 'n': multiplicity})

    # Act
    volumes = {}
    for step in output_steps:
        particulator.run(step - particulator.n_steps)
        check(n_part, dv, n_sd, rho, particulator.attributes, step)
        volumes[particulator.n_steps] = particulator.attributes['volume'].to_ndarray()

    # Assert: monotonic growth of the largest droplet volume
    largest_so_far = 0
    for vol in volumes.values():
        assert largest_so_far < np.amax(vol)
        largest_so_far = np.amax(vol)
def test_breakup_counters(params, backend_class=CPU):
    """The breakup-rate counter must equal gamma times the summed
    multiplicities of the smaller member of each pair."""
    # Arrange
    n_init = params["n_init"]
    n_sd = len(n_init)
    builder = Builder(n_sd, backend_class())
    # fix: np.NaN alias was removed in NumPy 2.0 -- use np.nan (as elsewhere in the file)
    builder.set_environment(Box(dv=np.nan, dt=np.nan))
    particulator = builder.build(
        attributes={
            "n": np.asarray(n_init),
            "volume": np.asarray([100 * si.um**3] * n_sd)
        },
        products=())
    n_pairs = n_sd // 2
    pairwise_zeros = particulator.PairwiseStorage.from_ndarray(np.array([0.0] * n_pairs))
    general_zeros = particulator.Storage.from_ndarray(np.array([0.0] * n_sd))
    gamma = particulator.PairwiseStorage.from_ndarray(np.array([params["gamma"]] * n_pairs))
    rand = particulator.PairwiseStorage.from_ndarray(np.array([params["rand"]] * n_pairs))
    Eb = particulator.PairwiseStorage.from_ndarray(np.array([params["Eb"]] * n_pairs))
    breakup_rate = particulator.Storage.from_ndarray(np.array([0.0]))
    n_fragment = particulator.PairwiseStorage.from_ndarray(np.array([4] * n_pairs))
    is_first_in_pair = particulator.PairIndicator(n_sd)
    is_first_in_pair.indicator[:] = particulator.Storage.from_ndarray(
        np.asarray(params["is_first_in_pair"]))
    # Act
    particulator.collision_coalescence_breakup(
        enable_breakup=True,
        gamma=gamma,
        rand=rand,
        Ec=pairwise_zeros,
        Eb=Eb,
        n_fragment=n_fragment,
        coalescence_rate=general_zeros,
        breakup_rate=breakup_rate,
        is_first_in_pair=is_first_in_pair
    )
    # Assert
    cell_id = 0
    assert (breakup_rate.to_ndarray()[cell_id]
            == np.sum(params["gamma"] * get_smaller_of_pairs(is_first_in_pair, n_init)))
def simulation(
    *,
    constants,
    seed,
    n_sd,
    time_step,
    volume,
    spectrum,
    droplet_volume,
    multiplicity,
    total_time,
    number_of_real_droplets,
    cooling_rate=0,
    heterogeneous_ice_nucleation_rate="Constant",
    initial_temperature=np.nan,
):
    """Box immersion-freezing simulation with optional linear cooling.

    Returns (f_ufz, a_tot): per-step time series of the unfrozen droplet
    fraction and of the total unfrozen immersed surface area.
    """
    formulae = Formulae(
        seed=seed,
        heterogeneous_ice_nucleation_rate=heterogeneous_ice_nucleation_rate,
        constants=constants,
    )
    builder = Builder(n_sd=n_sd, backend=CPU(formulae=formulae))
    env = Box(dt=time_step, dv=volume)
    builder.set_environment(env)
    builder.add_dynamic(Freezing(singular=False))

    # s_geom == 1: degenerate (monodisperse) spectrum -- assign identical
    # surface areas instead of sampling
    if hasattr(spectrum, "s_geom") and spectrum.s_geom == 1:
        _isa, _conc = np.full(n_sd, spectrum.m_mode), np.full(
            n_sd, multiplicity / volume)
    else:
        _isa, _conc = spectral_sampling.ConstantMultiplicity(spectrum).sample(
            n_sd)
    attributes = {
        "n": discretise_multiplicities(_conc * volume),
        "immersed surface area": _isa,
        "volume": np.full(n_sd, droplet_volume),
    }
    # sanity check: rounding must not alter the requested multiplicity
    np.testing.assert_almost_equal(attributes["n"], multiplicity)
    products = (
        IceWaterContent(name="qi"),
        TotalUnfrozenImmersedSurfaceArea(name="A_tot"),
    )
    particulator = builder.build(attributes=attributes, products=products)

    temperature = initial_temperature
    env["a_w_ice"] = np.nan
    svp = particulator.formulae.saturation_vapour_pressure
    cell_id = 0
    f_ufz = []
    a_tot = []
    for i in range(int(total_time / time_step) + 1):
        # half a cooling step before and after each dynamics step;
        # a_w_ice is recomputed from the saturation vapour pressures
        if cooling_rate != 0:
            temperature -= cooling_rate * time_step / 2
            env["a_w_ice"] = svp.ice_Celsius(
                temperature - const.T0) / svp.pvs_Celsius(temperature - const.T0)
        particulator.run(0 if i == 0 else 1)  # i == 0 records the initial state
        if cooling_rate != 0:
            temperature -= cooling_rate * time_step / 2

        # convert ice mass concentration back to an unfrozen droplet fraction
        ice_mass_per_volume = particulator.products["qi"].get()[cell_id]
        ice_mass = ice_mass_per_volume * volume
        ice_number = ice_mass / (formulae.constants.rho_w * droplet_volume)
        unfrozen_fraction = 1 - ice_number / number_of_real_droplets
        f_ufz.append(unfrozen_fraction)
        a_tot.append(particulator.products["A_tot"].get()[cell_id])
    return f_ufz, a_tot