Example #1
def test_size_distribution_n_part():
    # Arrange
    scale = 1
    n_part = 256
    sut = Exponential(n_part, scale)

    # Act
    m, dm = np.linspace(0, 5, 10000, retstep=True)
    sd = sut.size_distribution(m)

    # Assert: the distribution integrates (approximately) to the particle count
    assert_approx_equal(np.sum(sd) * dm, n_part, 2)
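Run stand-alone, the snippet above also needs imports along the lines of the following sketch; the PySDM module path for Exponential is an assumption, since it has moved between releases (PySDM.physics.spectra in older versions, PySDM.initialisation.spectra in newer ones):

import numpy as np
from numpy.testing import assert_approx_equal

from PySDM.initialisation.spectra import Exponential  # path assumed; see note above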
Example #2
class SetupA:
    init_x_min = phys.volume(radius=10 * si.micrometres)  # not given in the paper
    init_x_max = phys.volume(radius=100 * si.micrometres)  # not given in the paper

    n_sd = 2**13
    n_part = 2**23 / si.metre**3
    X0 = 4 / 3 * np.pi * (30.531 * si.micrometres)**3
    dv = 1e6 * si.metres**3
    norm_factor = n_part * dv
    rho = 1000 * si.kilogram / si.metre**3
    dt = 1 * si.seconds
    seed = 44
    steps = [0, 1200, 2400, 3600]

    kernel = Golovin(b=1.5e3 / si.second)
    spectrum = Exponential(norm_factor=norm_factor, scale=X0)

    radius_bins_edges = np.logspace(np.log10(10 * si.um),
                                    np.log10(5e3 * si.um),
                                    num=64,
                                    endpoint=True)

    backend = Default
Example #3
class Settings:
    init_x_min = phys.volume(radius=3.94 * si.micrometre)
    init_x_max = phys.volume(radius=25 * si.micrometres)

    n_sd = 2**13
    n_part = 239 / si.cm**3
    X0 = 4 / 3 * np.pi * (10 * si.micrometres)**3
    dv = 1e1 * si.metres**3  # 1e6 does not work with ThrustRTC (overflow?)
    norm_factor = n_part * dv
    rho = 1000 * si.kilogram / si.metre**3
    dt = 1 * si.seconds
    adaptive = False
    seed = 44
    _steps = [200 * i for i in range(10)]

    @property
    def steps(self):
        return [int(step / self.dt) for step in self._steps]

    kernel = Geometric(collection_efficiency=1)
    spectrum = Exponential(norm_factor=norm_factor, scale=X0)

    # TODO: upper edge set to 220 instead of 200 for smoothing
    radius_bins_edges = np.logspace(np.log10(3.94 * si.um),
                                    np.log10(220 * si.um),
                                    num=100,
                                    endpoint=True)
Example #4
class TestSum:
    scale = 1
    n_part = 256
    exponential = Exponential(n_part, scale)

    s = 1.001
    r_mode = 1e-6
    lognormal = Lognormal(1, r_mode, s)

    @staticmethod
    def test_size_distribution():
        # Arrange
        sut = Sum((TestSum.exponential, ))

        # Act
        x = np.linspace(0, 1)
        sut_sd = sut.size_distribution(x)
        exp_sd = TestSum.exponential.size_distribution(x)

        # Assert
        np.testing.assert_array_equal(sut_sd, exp_sd)

    @staticmethod
    def test_cumulative():
        # Arrange
        sut = Sum((TestSum.exponential, ))

        # Act
        x = np.linspace(0, 1)
        sut_c = sut.cumulative(x)
        exp_c = TestSum.exponential.cumulative(x)

        # Assert
        np.testing.assert_array_equal(sut_c, exp_c)

    @staticmethod
    @pytest.mark.parametrize("distributions", [
        pytest.param((exponential, ), id="single exponential"),
        pytest.param((lognormal, ), id="single lognormal"),
        pytest.param((exponential, exponential), id="2 exponentials")
    ])
    def test_percentiles(distributions):
        # Arrange
        sut = Sum(distributions)

        # Act
        # default_cdf_range is a module-level constant of the test file (not shown here)
        cdf_values = np.linspace(*default_cdf_range, 100)
        sut_p = sut.percentiles(cdf_values)
        exp_p = distributions[0].percentiles(cdf_values)

        # Assert
        np.testing.assert_array_almost_equal(sut_p, exp_p, decimal=3)
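The tests above exercise Sum only with a single component or with two identical exponentials. Assuming Sum composes spectra additively (an assumption about the intended semantics, not something the tests shown here verify), a hypothetical extra check could look like this:

    @staticmethod
    def test_size_distribution_of_two_components():
        # Arrange (hypothetical test; assumes additive composition, see note above)
        sut = Sum((TestSum.exponential, TestSum.exponential))

        # Act
        x = np.linspace(0, 1)
        sut_sd = sut.size_distribution(x)
        exp_sd = TestSum.exponential.size_distribution(x)

        # Assert
        np.testing.assert_array_almost_equal(sut_sd, 2 * exp_sd)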
Example #5
def test_coalescence(croupier):
    # Arrange
    v_min = 4.186e-15
    v_max = 4.186e-12
    n_sd = 2**13
    steps = [0, 30, 60]
    X0 = 4 / 3 * np.pi * 30.531e-6**3
    n_part = 2**23 / si.metre**3
    dv = 1e6 * si.metres**3
    dt = 1 * si.seconds
    norm_factor = n_part * dv
    rho = 1000 * si.kilogram / si.metre**3

    kernel = Golovin(b=1.5e3)  # [s-1]
    spectrum = Exponential(norm_factor=norm_factor, scale=X0)
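    # 'backend' and the 'check' helper used below come from the surrounding test module (not shown here)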
    particles_builder = Builder(n_sd=n_sd, backend=backend)
    particles_builder.set_environment(Box(dt=dt, dv=dv))
    attributes = {}
    attributes['volume'], attributes['n'] = constant_multiplicity(
        n_sd, spectrum, (v_min, v_max))
    particles_builder.add_dynamic(Coalescence(kernel, seed=256))
    particles = particles_builder.build(attributes)
    particles.croupier = croupier

    class Seed:
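        # a callable returning an incrementing integer on every call,
        # used below to replace the fixed seed passed to Coalescence above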
        seed = 0

        def __call__(self):
            Seed.seed += 1
            return Seed.seed

    particles.dynamics[str(Coalescence)].seed = Seed()

    states = {}

    # Act
    for step in steps:
        particles.run(step - particles.n_steps)
        check(n_part, dv, n_sd, rho, particles.state, step)
        states[particles.n_steps] = copy.deepcopy(particles.state)

    # Assert
    x_max = 0
    for state in states.values():
        volume_max = np.amax(state['volume'].to_ndarray())
        assert x_max < volume_max
        x_max = volume_max
Example #6
class Settings:

    n_sd = 2 ** 13
    n_part = 2 ** 23 / si.metre**3
    X0 = 4 / 3 * np.pi * (30.531 * si.micrometres) ** 3
    dv = 1e6 * si.metres**3
    norm_factor = n_part * dv
    rho = 1000 * si.kilogram / si.metre**3
    dt = 1 * si.seconds
    adaptive = False
    seed = 44
    _steps = [0, 1200, 2400, 3600]

    @property
    def steps(self):
        return [int(step/self.dt) for step in self._steps]

    kernel = Golovin(b=1.5e3 / si.second)
    spectrum = Exponential(norm_factor=norm_factor, scale=X0)

    radius_bins_edges = np.logspace(np.log10(10 * si.um),
                                    np.log10(5e3 * si.um),
                                    num=128,
                                    endpoint=True)
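A sketch of how such a Settings class is typically consumed follows, reusing the Builder/Box/Coalescence calls shown in Example #7 below; the CPU backend and ConstantMultiplicity import paths are assumptions and may differ between PySDM versions:

from PySDM import Builder, Formulae
from PySDM.backends import CPU  # assumed import path
from PySDM.dynamics import Coalescence
from PySDM.environments import Box
from PySDM.initialisation.sampling.spectral_sampling import ConstantMultiplicity  # assumed import path

settings = Settings()
builder = Builder(n_sd=settings.n_sd, backend=CPU(formulae=Formulae()))
builder.set_environment(Box(dt=settings.dt, dv=settings.dv))
attributes = {}
attributes['volume'], attributes['n'] = ConstantMultiplicity(settings.spectrum).sample(settings.n_sd)
builder.add_dynamic(Coalescence(settings.kernel, adaptive=settings.adaptive))
particulator = builder.build(attributes)
for step in settings.steps:
    particulator.run(step - particulator.n_steps)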
Example #7
def test_coalescence(backend_class, croupier, adaptive):
    if backend_class == ThrustRTC and croupier == 'local':  # TODO #358
        return
    if backend_class == ThrustRTC and adaptive and croupier == 'global':  # TODO #329
        return
    # Arrange
    formulae = Formulae(seed=256)
    n_sd = 2 ** 14
    steps = [0, 100, 200]
    X0 = formulae.trivia.volume(radius=30.531e-6)
    n_part = 2 ** 23 / si.metre ** 3
    dv = 1e6 * si.metres ** 3
    dt = 1 * si.seconds
    norm_factor = n_part * dv
    rho = 1000 * si.kilogram / si.metre ** 3

    kernel = Golovin(b=1.5e3)  # [s-1]
    spectrum = Exponential(norm_factor=norm_factor, scale=X0)
    builder = Builder(n_sd=n_sd, backend=backend_class(formulae=formulae))
    builder.set_environment(Box(dt=dt, dv=dv))
    attributes = {}
    attributes['volume'], attributes['n'] = ConstantMultiplicity(spectrum).sample(n_sd)
    builder.add_dynamic(Coalescence(kernel, croupier=croupier, adaptive=adaptive))
    particulator = builder.build(attributes)

    volumes = {}

    # Act
    for step in steps:
        particulator.run(step - particulator.n_steps)
        check(n_part, dv, n_sd, rho, particulator.attributes, step)
        volumes[particulator.n_steps] = particulator.attributes['volume'].to_ndarray()

    # Assert
    x_max = 0
    for volume in volumes.values():
        assert x_max < np.amax(volume)
        x_max = np.amax(volume)
Example #8
    def __init__(self):
        self.formulae = Formulae()
        self.n_sd = 2**20
        self.n_part = 100 / si.cm**3
        self.X0 = self.formulae.trivia.volume(radius=30.531 * si.micrometres)
        self.dv = 1 * si.m**3
        self.norm_factor = self.n_part * self.dv
        self.rho = 1000 * si.kilogram / si.metre**3
        self.dt = 1 * si.seconds
        self.adaptive = False
        self.seed = 44
        self._steps = [0]
        self.kernel = Geometric()
        self.coal_eff = Berry1967()
        self.fragmentation = ExponFrag(scale=100.0 * si.micrometres)
        self.break_eff = ConstEb(1.0)  # no "bouncing"
        self.spectrum = Exponential(norm_factor=self.norm_factor,
                                    scale=self.X0)
        self.radius_bins_edges = np.logspace(np.log10(10 * si.um),
                                             np.log10(5000 * si.um),
                                             num=128,
                                             endpoint=True)
        self.radius_range = [0 * si.um, 1e6 * si.um]
Example #9
class Setup:
    init_x_min = phys.volume(radius=3.94 * si.micrometre)
    init_x_max = phys.volume(radius=25 * si.micrometres)

    n_sd = 2**13
    n_part = 239 / si.cm**3
    X0 = 4 / 3 * np.pi * (10 * si.micrometres)**3
    dv = 1e1 * si.metres**3
    norm_factor = n_part * dv
    rho = 1000 * si.kilogram / si.metre**3
    dt = 10 * si.seconds
    seed = 44
    steps = [200 * i for i in range(10)]

    kernel = Gravitational(collection_efficiency=None)
    spectrum = Exponential(norm_factor=norm_factor, scale=X0)

    radius_bins_edges = np.logspace(np.log10(3.94 * si.um),
                                    np.log10(200 * si.um),
                                    num=128,
                                    endpoint=True)

    backend = Default