Example #1
class PeriodicChannel2DLinkedList(PeriodicChannel2DTestCase):
    def setUp(self):
        PeriodicChannel2DTestCase.setUp(self)
        self.nnps = LinkedListNNPS(dim=2,
                                   particles=self.particles,
                                   domain=self.domain,
                                   radius_scale=self.kernel.radius_scale)

    def test_periodicity_flags(self):
        "LinkedListNNPS :: test periodicity flags"
        self._test_periodicity_flags()

    def test_summation_density(self):
        "LinkedListNNPS :: test summation density"
        self._test_summation_density()

    def test_add_property_after_creation_works(self):
        # Given
        particles = self.particles
        fluid = particles[0]

        # When
        fluid.add_property('junk')

        # Then
        self.nnps.update_domain()
        self._test_summation_density()
Example #2
def remove_overlap_particles(fluid_parray, solid_parray, dx_solid, dim=3):
    """
    This function takes two particle arrays as input and removes all
    particles of the first particle array that lie in the vicinity of
    particles from the second particle array. All particles within a
    dx_solid distance are removed, so some particles at the outer surface
    of the second particle array are removed as well. It uses a PySPH
    nearest neighbour particle search, which returns the particles within
    some range for every given particle.

    Parameters
    ----------
    fluid_parray : a pysph particle array object
    solid_parray : a pysph particle array object
    dx_solid : a number which is the dx of the second particle array
    dim : dimensionality of the problem

    The particle arrays should contain at least x, y and h values for a 2D
    case and at least x, y, z and h values for a 3D case.

    Returns
    -------
    particle_array : pysph wcsph_particle_array with x, y, z and h values
    """

    x = fluid_parray.x
    x1 = solid_parray.x
    y = fluid_parray.y
    y1 = solid_parray.y
    z = fluid_parray.z
    z1 = solid_parray.z
    h = fluid_parray.h
    if dim == 2:
        z = np.zeros_like(x)
        z1 = np.zeros_like(x1)
    modified_points = []
    h_new = []
    ll_nnps = LinkedListNNPS(dim, [fluid_parray, solid_parray])
    for i in range(len(x)):
        nbrs = UIntArray()
        ll_nnps.get_nearest_particles(1, 0, i, nbrs)
        point_i = np.array([x[i], y[i], z[i]])
        near_points = nbrs.get_npy_array()
        distances = []
        for ind in near_points:
            dest = [x1[ind], y1[ind], z1[ind]]
            distances.append(distance(point_i, dest))
        if len(distances) == 0:
            modified_points.append(point_i)
            h_new.append(h[i])
        elif min(distances) >= (dx_solid * (1.0 - 1.0e-07)):
            modified_points.append(point_i)
            h_new.append(h[i])
    modified_points = np.array(modified_points)
    x_new = modified_points[:, 0]
    y_new = modified_points[:, 1]
    z_new = modified_points[:, 2]
    p_array = get_particle_array_wcsph(x=x_new, y=y_new, z=z_new, h=h_new)
    return p_array
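A minimal usage sketch for the function above. The arrays, spacings and names are made up for illustration, and it assumes the snippet's own imports (numpy, get_particle_array_wcsph and the distance helper) are available in the same module:

import numpy as np
from pysph.base.utils import get_particle_array_wcsph

dx = 0.1
xf, yf = np.mgrid[0:1:dx, 0:1:dx]        # fluid block
xs, ys = np.mgrid[0.5:1:dx, 0:1:dx]      # solid block overlapping its right half
fluid = get_particle_array_wcsph(name='fluid', x=xf.ravel(), y=yf.ravel(),
                                 h=np.ones(xf.size) * 1.2 * dx)
solid = get_particle_array_wcsph(name='solid', x=xs.ravel(), y=ys.ravel(),
                                 h=np.ones(xs.size) * 1.2 * dx)

# Fluid particles closer than dx_solid to any solid particle are dropped and
# a fresh wcsph particle array is returned.
trimmed = remove_overlap_particles(fluid, solid, dx_solid=dx, dim=2)
print(trimmed.get_number_of_particles())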
Example #3
def time_pysph_nnps(pa, nbrs, num_particles):
    start_time = time.time()
    r = 1/1.101
    nn = LinkedListNNPS(dim=3, particles=[pa], radius_scale=r, cache=True)
    nn.set_context(0, 0)
    for i in range(num_particles):
        nn.get_nearest_particles(0, 0, i, nbrs)
    return time.time() - start_time
Example #4
 def _make_accel_eval(self, equations):
     arrays = [self.pa]
     kernel = CubicSpline(dim=self.dim)
     a_eval = AccelerationEval(
         particle_arrays=arrays, equations=equations, kernel=kernel
     )
     comp = SPHCompiler(a_eval, integrator=None)
     comp.compile()
     nnps = NNPS(dim=kernel.dim, particles=arrays)
     nnps.update()
     a_eval.set_nnps(nnps)
     return a_eval
Example #5
 def _make_accel_eval(self, equations, cache_nnps=False):
     arrays = [self.pa]
     kernel = CubicSpline(dim=self.dim)
     a_eval = AccelerationEval(
         particle_arrays=arrays, equations=equations, kernel=kernel
     )
     comp = SPHCompiler(a_eval, integrator=None)
     comp.compile()
     nnps = NNPS(dim=kernel.dim, particles=arrays, cache=cache_nnps)
     nnps.update()
     a_eval.set_nnps(nnps)
     return a_eval
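Once built this way, the returned evaluator is typically driven by calling compute on it, exactly as the interpolator examples further down do. A sketch of how a test method might use it (my_equations is a placeholder list of equation instances):

a_eval = self._make_accel_eval(my_equations, cache_nnps=True)
a_eval.compute(0.0, 0.1)  # evaluate all equation groups once
# ... then assert on the updated properties of the destination array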
Example #6
class SPHEvaluator(object):
    def __init__(self,
                 arrays,
                 equations,
                 dim,
                 kernel=None,
                 domain_manager=None):
        """Constructor.

        Parameters
        ----------
        arrays: list(ParticleArray)
        equations: list
        dim: int
        kernel: kernel instance.
        domain_manager: DomainManager
        """
        self.arrays = arrays
        self.equations = equations
        self.domain_manager = domain_manager
        self.dim = dim
        if kernel is None:
            self.kernel = Gaussian(dim=dim)
        else:
            self.kernel = kernel

        self.func_eval = AccelerationEval(arrays, equations, self.kernel)
        compiler = SPHCompiler(self.func_eval, None)
        compiler.compile()
        self._create_nnps(arrays)

    def evaluate(self, t=0.0, dt=0.1):
        """Evalute the SPH equations, dummy t and dt values can
        be passed.
        """
        self.func_eval.compute(t, dt)

    def update_particle_arrays(self, arrays):
        self._create_nnps(arrays)
        self.func_eval.update_particle_arrays(arrays)

    #### Private protocol ###################################################
    def _create_nnps(self, arrays):
        self.nnps = NNPS(dim=self.kernel.dim,
                         particles=arrays,
                         radius_scale=self.kernel.radius_scale,
                         domain=self.domain_manager,
                         cache=True)
        self.nnps.update()
        self.func_eval.set_nnps(self.nnps)
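A short usage sketch for the class above, assuming the standard PySPH helpers get_particle_array and SummationDensity; the particle spacing and property values are arbitrary:

import numpy as np
from pysph.base.utils import get_particle_array
from pysph.sph.basic_equations import SummationDensity

dx = 0.04
x, y = np.mgrid[0:1:dx, 0:1:dx]
pa = get_particle_array(name='fluid', x=x.ravel(), y=y.ravel(),
                        h=np.ones(x.size) * 1.2 * dx,
                        m=np.ones(x.size) * dx * dx)

sph_eval = SPHEvaluator(
    arrays=[pa],
    equations=[SummationDensity(dest='fluid', sources=['fluid'])],
    dim=2)
sph_eval.evaluate()  # pa.rho now holds the summation density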
Example #7
def find_overlap_particles(fluid_parray, solid_parray, dx_solid, dim=3):
    """This function will take 2 particle arrays as input and will find all the
    particles of the first particle array which are in the vicinity of the
    particles from second particle array. The function will find all the
    particles within the dx_solid vicinity so some particles may be identified
    at the outer surface of the particles from the second particle array.

    The particle arrays should contain at least x, y and h values for a 2D
    case and at least x, y, z and h values for a 3D case.

    Parameters
    ----------
    fluid_parray : a pysph particle array object
    solid_parray : a pysph particle array object
    dx_solid : a number which is the dx of the second particle array
    dim : dimensionality of the problem

    Returns
    -------
    list of particle indices to remove from the first array.

    """

    x = fluid_parray.x
    x1 = solid_parray.x
    y = fluid_parray.y
    y1 = solid_parray.y
    z = fluid_parray.z
    z1 = solid_parray.z
    if dim == 2:
        z = np.zeros_like(x)
        z1 = np.zeros_like(x1)
    to_remove = []
    ll_nnps = LinkedListNNPS(dim, [fluid_parray, solid_parray])
    for i in range(len(x)):
        nbrs = UIntArray()
        ll_nnps.get_nearest_particles(1, 0, i, nbrs)
        point_i = np.array([x[i], y[i], z[i]])
        near_points = nbrs.get_npy_array()
        distances = []
        for ind in near_points:
            dest = [x1[ind], y1[ind], z1[ind]]
            distances.append(distance(point_i, dest))
        if len(distances) == 0:
            continue
        elif min(distances) < (dx_solid * (1.0 - 1.0e-07)):
            to_remove.append(i)
    return to_remove
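A hedged companion sketch: unlike Example #2, which builds a fresh array, the indices returned here can be removed in place with ParticleArray.remove_particles (fluid and solid are assumed to be particle arrays such as those in the sketch after Example #2):

to_remove = find_overlap_particles(fluid, solid, dx_solid=dx, dim=2)
fluid.remove_particles(to_remove)  # drop the overlapping fluid particles
print(fluid.get_number_of_particles())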
Example #8
 def setUp(self):
     PeriodicBox2DTestCaseCPU.setUp(self)
     self.orig_n = self.fluid.get_number_of_particles()
     self.nnps = LinkedListNNPS(
         dim=2, particles=[self.fluid],
         domain=self.domain,
         radius_scale=self.kernel.radius_scale)
Example #9
 def _create_nnps(self, arrays):
     self.nnps = NNPS(dim=self.kernel.dim,
                      particles=arrays,
                      radius_scale=self.kernel.radius_scale,
                      domain=self.domain_manager,
                      cache=True)
     self.nnps.update()
     self.func_eval.set_nnps(self.nnps)
Example #10
 def test_linked_list_nnps_large_num_cells(self):
     '''
     Tests LinkedListNNPS for a large number of cells.
     Fails with a malloc error
     '''
     print ""
     for key in self.dataset_large_domain:
         x, y, z, h = self.dataset_large_domain[key]
         pa = get_particle_array(x=x, y=y, z=z, h=h)
         qid = randint(0, pa.get_number_of_particles() - 1)
         nn_bf = brute_force_neighbours(pa, pa.x[qid], pa.y[qid],
                 pa.z[qid], pa.h[qid])
         r = 1/1.101
         sp = LinkedListNNPS(dim=3, particles=[pa], radius_scale=r, cache=True)
         nn_sp = UIntArray()
         sp.set_context(0, 0)
         sp.get_nearest_particles(0, 0, qid, nn_sp)
         print(" - " + key)
         assert equal(nn_bf, nn_sp)
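The brute_force_neighbours helper used above is not shown in the snippet; a hypothetical re-implementation of the check it performs (an O(N) scan for all particles inside the scaled kernel support of the query point) might look like this:

import numpy as np

def brute_force_neighbours(pa, xq, yq, zq, hq, radius_scale=1 / 1.101):
    # Hypothetical helper: scan every particle of `pa` and keep those whose
    # distance to the query point lies within the scaled support radius.
    dist2 = (pa.x - xq) ** 2 + (pa.y - yq) ** 2 + (pa.z - zq) ** 2
    support = radius_scale * np.maximum(hq, pa.h)
    return np.where(dist2 <= support * support)[0]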
Example #11
 def setUp(self):
     TestPeriodicChannel3D.setUp(self)
     l = self.l
     self.domain = DomainManager(zmin=-l / 2.0,
                                 zmax=l / 2.0,
                                 periodic_in_z=True)
     self.nnps = LinkedListNNPS(dim=3,
                                particles=self.particles,
                                domain=self.domain,
                                radius_scale=self.kernel.radius_scale)
Example #12
 def _setup_integrator(self, equations, integrator):
     kernel = CubicSpline(dim=1)
     arrays = [self.pa]
     a_eval = AccelerationEval(particle_arrays=arrays,
                               equations=equations,
                               kernel=kernel)
     comp = SPHCompiler(a_eval, integrator=integrator)
     comp.compile()
     nnps = LinkedListNNPS(dim=kernel.dim, particles=arrays)
     a_eval.set_nnps(nnps)
     integrator.set_nnps(nnps)
Example #13
    def test_neighbors_cached_properly(self):
        # Given
        pa1 = self._make_random_parray('pa1', 5)
        pa2 = self._make_random_parray('pa2', 4)
        particles = [pa1, pa2]
        nnps = LinkedListNNPS(dim=3, particles=particles)

        for dst_index in (0, 1):
            for src_idx in (0, 1):
                # When
                cache = NeighborCache(nnps, dst_index, src_idx)
                cache.update()
                nb_cached = UIntArray()
                nb_direct = UIntArray()

                # Then.
                for i in range(len(particles[dst_index].x)):
                    nnps.get_nearest_particles_no_cache(
                        src_idx, dst_index, i, nb_direct, False)
                    cache.get_neighbors(src_idx, i, nb_cached)
                    nb_e = nb_direct.get_npy_array()
                    nb_c = nb_cached.get_npy_array()
                    self.assertTrue(np.all(nb_e == nb_c))
Example #14
    def test_empty_neigbors_works_correctly(self):
        # Given
        pa1 = self._make_random_parray('pa1', 5)
        pa2 = self._make_random_parray('pa2', 2)
        pa2.x += 10.0
        particles = [pa1, pa2]

        # When
        nnps = LinkedListNNPS(dim=3, particles=particles)
        # Cache for neighbors of destination 0.
        cache = NeighborCache(nnps, dst_index=0, src_index=1)
        cache.update()

        # Then
        nb_cached = UIntArray()
        nb_direct = UIntArray()
        for i in range(len(particles[0].x)):
            nnps.get_nearest_particles_no_cache(1, 0, i, nb_direct, False)
            # Get neighbors from source 1 on destination 0.
            cache.get_neighbors(src_index=1, d_idx=i, nbrs=nb_cached)
            nb_e = nb_direct.get_npy_array()
            nb_c = nb_cached.get_npy_array()
            self.assertEqual(len(nb_e), 0)
            self.assertTrue(np.all(nb_e == nb_c))
Example #15
    def setUp(self):
        # create the particle arrays
        L = 1.0
        n = 5
        dx = L / n
        hdx = 1.5
        self.L = L
        self.vol = vol = dx * dx * dx

        # fluid particles
        xx, yy, zz = np.mgrid[dx / 2:L:dx, dx / 2:L:dx, dx / 2:L:dx]

        x = xx.ravel()
        y = yy.ravel()
        z = zz.ravel()  # particle positions
        p = self._get_pressure(x, y, z)
        h = np.ones_like(x) * hdx * dx  # smoothing lengths
        m = np.ones_like(x) * vol  # mass
        V = np.zeros_like(x)  # volumes

        fluid = get_particle_array(name='fluid',
                                   x=x,
                                   y=y,
                                   z=z,
                                   h=h,
                                   m=m,
                                   V=V,
                                   p=p)

        # particles and domain
        self.fluid = fluid
        self.domain = DomainManager(xmin=0,
                                    xmax=L,
                                    ymin=0,
                                    ymax=L,
                                    zmin=0,
                                    zmax=L,
                                    periodic_in_x=True,
                                    periodic_in_y=True,
                                    periodic_in_z=True)
        self.kernel = get_compiled_kernel(Gaussian(dim=3))

        self.orig_n = self.fluid.get_number_of_particles()
        self.nnps = LinkedListNNPS(dim=3,
                                   particles=[self.fluid],
                                   domain=self.domain,
                                   radius_scale=self.kernel.radius_scale)
Example #16
    def test_setting_use_cache_does_cache(self):
        # Given
        pa = self._make_random_parray('pa1', 3)
        pa.h[:] = 1.0
        nnps = LinkedListNNPS(dim=3, particles=[pa], cache=False)
        n = pa.get_number_of_particles()

        # When
        nnps.set_use_cache(True)
        nbrs = UIntArray()
        nnps.set_context(0, 0)
        for i in range(n):
            nnps.get_nearest_particles(0, 0, i, nbrs)

        # Then
        self.assertEqual(nbrs.length, n)
        # Find the length of all cached neighbors,
        # in this case, each particle has n neighbors,
        # so we should have n*n neighbors in all.
        total_length = sum(x.length for x in nnps.cache[0]._neighbor_arrays)
        self.assertEqual(total_length, n * n)
Example #17
    def test_cache_updates_with_changed_particles(self):
        # Given
        pa1 = self._make_random_parray('pa1', 5)
        particles = [pa1]
        nnps = LinkedListNNPS(dim=3, particles=particles)
        cache = NeighborCache(nnps, dst_index=0, src_index=0)
        cache.update()

        # When
        pa2 = self._make_random_parray('pa2', 2)
        pa1.add_particles(x=pa2.x, y=pa2.y, z=pa2.z)
        nnps.update()
        cache.update()
        nb_cached = UIntArray()
        nb_direct = UIntArray()
        for i in range(len(particles[0].x)):
            nnps.get_nearest_particles_no_cache(0, 0, i, nb_direct, False)
            cache.get_neighbors(0, i, nb_cached)
            nb_e = nb_direct.get_npy_array()
            nb_c = nb_cached.get_npy_array()
            self.assertTrue(np.all(nb_e == nb_c))
Example #18
print('Volume estimates :: dx^2 = %g, Number density = %g' % (dx*dy, volume))

x = x.ravel(); y = y.ravel()
h = numpy.ones_like(x) * h0
m = numpy.ones_like(x) * volume
wij = numpy.zeros_like(x)

# use the helper function get_particle_array to create a ParticleArray
pa = utils.get_particle_array(x=x, y=y, h=h, m=m, wij=wij)

# the simulation domain used to request periodicity
domain = DomainManager(
    xmin=0., xmax=1., ymin=0., ymax=1., periodic_in_x=True, periodic_in_y=True)

# NNPS object for nearest neighbor queries
nps = LinkedListNNPS(dim=2, particles=[pa,], radius_scale=k.radius_scale, domain=domain)

# container for neighbors
nbrs = UIntArray()

# arrays including ghosts
x, y, h, m = pa.get('x', 'y', 'h', 'm', only_real_particles=False)

# iterate over destination particles
t1 = time()
max_ngb = -1
for i in range(pa.num_real_particles):
    xi = x[i]; yi = y[i]; hi = h[i]
    
    # get list of neighbors
    nps.get_nearest_particles(0, 0, i, nbrs)
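The snippet is cut off inside the neighbour loop; a hedged sketch of a typical loop body, with a 2D Gaussian weight written out inline instead of taken from the script's compiled kernel k, could continue like this:

    # indices of the neighbours of particle i (including periodic ghosts)
    neighbors = nbrs.get_npy_array()
    max_ngb = max(max_ngb, neighbors.size)

    # accumulate the summation density rho_i = sum_j m_j W_ij into wij;
    # the 2D Gaussian weight is W = exp(-q**2) / (pi * h**2) with q = r/h
    for j in neighbors:
        xij = xi - x[j]
        yij = yi - y[j]
        q = numpy.sqrt(xij * xij + yij * yij) / hi
        wij[i] += m[j] * numpy.exp(-q * q) / (numpy.pi * hi * hi)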
Example #19
# use the helper function get_particle_array to create a ParticleArray
pa = utils.get_particle_array(x=x, y=y, h=h, m=m, wij=wij)

# the simulation domain used to request periodicity
domain = DomainManager(xmin=0.,
                       xmax=1.,
                       ymin=0.,
                       ymax=1.,
                       periodic_in_x=True,
                       periodic_in_y=True)

# NNPS object for nearest neighbor queries
nps = LinkedListNNPS(dim=2,
                     particles=[
                         pa,
                     ],
                     radius_scale=k.radius_scale,
                     domain=domain)

# container for neighbors
nbrs = UIntArray()

# arrays including ghosts
x, y, h, m = pa.get('x', 'y', 'h', 'm', only_real_particles=False)

# iterate over destination particles
t1 = time()
max_ngb = -1
for i in range(pa.num_real_particles):
    xi = x[i]
    yi = y[i]
Example #20
    def __init__(self,
                 all_particles,
                 scheme,
                 domain=None,
                 innerloop=True,
                 updates=True,
                 parallel=False,
                 steps=None,
                 D=0):
        """The second integrator is a simple Euler-Integrator (accurate
        enough due to very small time steps; very fast) using EBGSteps.
        EBGSteps are basically the same as EulerSteps, exept for the fact
        that they work with an intermediate ebg velocity [eu, ev, ew].
        This velocity does not interfere with the actual velocity, which
        is neseccery to not disturb the real velocity through artificial
        damping in this step. The ebg velocity is initialized for each
        inner loop again and reset in the outer loop."""
        from math import ceil
        from pysph.base.kernels import CubicSpline
        from pysph.sph.integrator_step import EBGStep
        from compyle.config import get_config
        from pysph.sph.integrator import EulerIntegrator
        from pysph.sph.scheme import BeadChainScheme
        from pysph.sph.equation import Group
        from pysph.sph.fiber.utils import (HoldPoints, Contact,
                                           ComputeDistance)
        from pysph.sph.fiber.beadchain import (Tension, Bending,
                                               ArtificialDamping)
        from pysph.base.nnps import DomainManager, LinkedListNNPS
        from pysph.sph.acceleration_eval import AccelerationEval
        from pysph.sph.sph_compiler import SPHCompiler

        if not isinstance(scheme, BeadChainScheme):
            raise TypeError("Scheme must be BeadChainScheme")

        self.innerloop = innerloop
        self.dt = scheme.dt
        self.fiber_dt = scheme.fiber_dt
        self.domain_updates = updates
        self.steps = steps
        self.D = D
        self.eta0 = scheme.rho0 * scheme.nu

        # If more than one particle is involved, the elastic equations are
        # iterated in an inner loop.
        if self.innerloop:
            # second integrator
            # self.fiber_integrator = EulerIntegrator(fiber=EBGStep())
            steppers = {}
            for f in scheme.fibers:
                steppers[f] = EBGStep()
            self.fiber_integrator = EulerIntegrator(**steppers)
            # The type of spline has no influence here, but its support must
            # be large enough to contain the next particle.
            kernel = CubicSpline(dim=scheme.dim)
            equations = []
            g1 = []
            for fiber in scheme.fibers:
                g1.append(ComputeDistance(dest=fiber, sources=[fiber]))
            equations.append(Group(equations=g1))

            g2 = []
            for fiber in scheme.fibers:
                g2.append(
                    Tension(dest=fiber, sources=None, ea=scheme.E * scheme.A))
                g2.append(
                    Bending(dest=fiber, sources=None, ei=scheme.E * scheme.Ip))
                g2.append(
                    Contact(dest=fiber,
                            sources=scheme.fibers,
                            E=scheme.E,
                            d=scheme.dx,
                            dim=scheme.dim,
                            k=scheme.k,
                            lim=scheme.lim,
                            eta0=self.eta0))
                g2.append(ArtificialDamping(dest=fiber, sources=None,
                                            d=self.D))
            equations.append(Group(equations=g2))

            g3 = []
            for fiber in scheme.fibers:
                g3.append(HoldPoints(dest=fiber, sources=None, tag=100))
            equations.append(Group(equations=g3))

            # These equations are applied to fiber particles only; that is
            # the reason for the computational speed-up.
            particles = [p for p in all_particles if p.name in scheme.fibers]
            # A separate DomainManager is needed to ensure that particles
            # don't leave the domain.
            if domain:
                xmin = domain.manager.xmin
                ymin = domain.manager.ymin
                zmin = domain.manager.zmin
                xmax = domain.manager.xmax
                ymax = domain.manager.ymax
                zmax = domain.manager.zmax
                periodic_in_x = domain.manager.periodic_in_x
                periodic_in_y = domain.manager.periodic_in_y
                periodic_in_z = domain.manager.periodic_in_z
                gamma_yx = domain.manager.gamma_yx
                gamma_zx = domain.manager.gamma_zx
                gamma_zy = domain.manager.gamma_zy
                n_layers = domain.manager.n_layers
                N = self.steps or int(ceil(self.dt / self.fiber_dt))
                # dt = self.dt/N
                self.domain = DomainManager(xmin=xmin,
                                            xmax=xmax,
                                            ymin=ymin,
                                            ymax=ymax,
                                            zmin=zmin,
                                            zmax=zmax,
                                            periodic_in_x=periodic_in_x,
                                            periodic_in_y=periodic_in_y,
                                            periodic_in_z=periodic_in_z,
                                            gamma_yx=gamma_yx,
                                            gamma_zx=gamma_zx,
                                            gamma_zy=gamma_zy,
                                            n_layers=n_layers,
                                            dt=self.dt,
                                            calls_per_step=N)
            else:
                self.domain = None
            # A separate list for the nearest neighbour search is
            # beneficial since it is much smaller than the original one.
            nnps = LinkedListNNPS(dim=scheme.dim,
                                  particles=particles,
                                  radius_scale=kernel.radius_scale,
                                  domain=self.domain,
                                  fixed_h=False,
                                  cache=False,
                                  sort_gids=False)
            # The acceleration evaluator needs to be set up in order to compile
            # it together with the integrator.
            if parallel:
                self.acceleration_eval = AccelerationEval(
                    particle_arrays=particles,
                    equations=equations,
                    kernel=kernel)
            else:
                self.acceleration_eval = AccelerationEval(
                    particle_arrays=particles,
                    equations=equations,
                    kernel=kernel,
                    mode='serial')
            # Compile the integrator without OpenMP because the overhead is
            # too large for these few fiber particles.
            comp = SPHCompiler(self.acceleration_eval, self.fiber_integrator)
            if parallel:
                comp.compile()
            else:
                config = get_config()
                config.use_openmp = False
                comp.compile()
                config.use_openmp = True
            self.acceleration_eval.set_nnps(nnps)

            # Connect the neighbour list to the integrator.
            self.fiber_integrator.set_nnps(nnps)
Example #21
class Interpolator(object):
    """Convenient class to interpolate particle properties onto a uniform
    grid.  This is particularly handy for visualization.
    """

    def __init__(self, particle_arrays, num_points=125000, kernel=None,
                 x=None, y=None, z=None):
        """
        Parameters
        ----------

        particle_arrays: A list of particle arrays.
        num_points: the number of points to interpolate on to.
        kernel: the kernel to use for interpolation.

        x: ndarray: the x-coordinate of points on which to interpolate.
        y: ndarray: the y-coordinate of points on which to interpolate.
        z: ndarray: the z-coordinate of points on which to interpolate.


        The x, y, z coordinates need not be specified; if they are not, the
        bounds of the interpolation domain are computed automatically and
        `num_points` points are placed uniformly within it.

        """
        self._set_particle_arrays(particle_arrays)
        bounds = get_bounding_box(self.particle_arrays)
        shape = get_nx_ny_nz(num_points, bounds)
        self.dim = 3 - list(shape).count(1)

        if kernel is None:
            self.kernel = CubicSpline(dim=self.dim)
        else:
            self.kernel = kernel

        self.pa = None
        self.nnps = None
        self.func_eval = None
        if x is None and y is None and z is None:
            self.set_domain(bounds, shape)
        else:
            self.set_interpolation_points(x=x, y=y, z=z)

    #### Interpolator protocol ################################################
    def set_interpolation_points(self, x=None, y=None, z=None):
        """Set the points on which we must interpolate the arrays.

        Parameters
        ----------

        x: ndarray: the x-coordinate of points on which to interpolate.
        y: ndarray: the y-coordinate of points on which to interpolate.
        z: ndarray: the z-coordinate of points on which to interpolate.

        If any of x, y, z is not passed it is assumed to be 0.0 and shaped
        like the other non-None arrays.

        """
        tmp = None
        for tmp in (x, y, z):
            if tmp is not None:
                break
        if tmp is None:
            raise RuntimeError('At least one non-None array must be given.')

        def _get_array(_t):
            return np.asarray(_t) if _t is not None else np.zeros_like(tmp)

        x, y, z = _get_array(x), _get_array(y), _get_array(z)

        self.shape = x.shape
        self.pa = self._create_particle_array(x, y, z)
        arrays = self.particle_arrays + [self.pa]

        if self.func_eval is None:
            self._compile_acceleration_eval(arrays)

        self.update_particle_arrays(self.particle_arrays)

    def set_domain(self, bounds, shape):
        """Set the domain to interpolate into.

        Parameters
        ----------

        bounds: (xmin, xmax, ymin, ymax, zmin, zmax)
        shape: (nx, ny, nz)
        """
        self.bounds = np.asarray(bounds)
        self.shape = np.asarray(shape)
        x, y, z = self._create_default_points(self.bounds, self.shape)
        self.set_interpolation_points(x, y, z)

    def interpolate(self, prop, gradient=False):
        """

        :prop: The name of the property to interpolate.

        :gradient: bool: Evaluate gradient and not function.

        :return: A numpy array suitably shaped with the property
        interpolated.
        """
        for array in self.particle_arrays:
            data = array.get(prop, only_real_particles=False)
            array.get('temp_prop', only_real_particles=False)[:] = data

        self.func_eval.compute(0.0, 0.1) # These are junk arguments.
        result = self.pa.prop.copy()
        result.shape = self.shape
        return result.squeeze()

    def update_particle_arrays(self, particle_arrays):
        """Call this for a new set of particle arrays which have the
        same properties as before.

        For example, if you are reading the particle array data from files,
        each time you load a new file a new particle array is read with the
        same properties.  Call this function to reset the arrays.
        """
        self._set_particle_arrays(particle_arrays)
        arrays = self.particle_arrays + [self.pa]
        self._create_nnps(arrays)
        self.func_eval.update_particle_arrays(arrays)

    #### Private protocol #####################################################

    def _create_nnps(self, arrays):
        # create the neighbor locator object
        self.nnps = NNPS(dim=self.kernel.dim, particles=arrays,
                         radius_scale=self.kernel.radius_scale)
        self.nnps.update()
        self.func_eval.set_nnps(self.nnps)

    def _create_default_points(self, bounds, shape):
        b = bounds
        n = shape
        x, y, z = np.mgrid[b[0]:b[1]:n[0]*1j,
                           b[2]:b[3]:n[1]*1j,
                           b[4]:b[5]:n[2]*1j,
                          ]
        return x, y, z

    def _create_particle_array(self, x, y, z):
        xr = x.ravel()
        yr = y.ravel()
        zr = z.ravel()
        self.x, self.y, self.z = x.squeeze(), y.squeeze(), z.squeeze()

        hmax = self._get_max_h_in_arrays()
        h = hmax*np.ones_like(xr)
        prop = np.zeros_like(xr)
        pa = get_particle_array(
            name='interpolate',
            x=xr, y=yr, z=zr, h=h,
            number_density=np.zeros_like(xr),
            prop=prop,
            grad_x=np.zeros_like(xr),
            grad_y=np.zeros_like(xr),
            grad_z=np.zeros_like(xr)
        )
        return pa

    def _compile_acceleration_eval(self, arrays):
        names = [x.name for x in self.particle_arrays]
        equations = [InterpolateFunction(dest='interpolate', sources=names)]
        self.func_eval = AccelerationEval(arrays, equations, self.kernel)
        compiler = SPHCompiler(self.func_eval, None)
        compiler.compile()

    def _get_max_h_in_arrays(self):
        hmax = -1.0
        for array in self.particle_arrays:
            hmax = max(array.h.max(), hmax)
        return hmax

    def _set_particle_arrays(self, particle_arrays):
        self.particle_arrays = particle_arrays
        self._make_all_arrays_have_same_props(particle_arrays)
        for array in self.particle_arrays:
            if 'temp_prop' not in array.properties:
                array.add_property('temp_prop')

    def _make_all_arrays_have_same_props(self, particle_arrays):
        """Make sure all arrays have the same props.
        """
        all_props = reduce(
            set.union, [set(x.properties.keys()) for x in particle_arrays]
        )
        for array in particle_arrays:
            all_props.update(array.properties.keys())

        for array in particle_arrays:
            array_props = set(array.properties.keys())
            for prop in (all_props - array_props):
                array.add_property(prop)
Example #22
class Interpolator(object):
    """Convenient class to interpolate particle properties onto a uniform grid
    or given set of particles.  This is particularly handy for visualization.

    """
    def __init__(self,
                 particle_arrays,
                 num_points=125000,
                 kernel=None,
                 x=None,
                 y=None,
                 z=None,
                 domain_manager=None,
                 equations=None):
        """
        The x, y, z coordinates need not be specified; if they are not, the
        bounds of the interpolation domain are computed automatically and
        `num_points` points are placed uniformly within it.

        Parameters
        ----------

        particle_arrays: list
            A list of particle arrays.
        num_points: int
            the number of points to interpolate on to.
        kernel: Kernel
            the kernel to use for interpolation.
        x: ndarray
            the x-coordinate of points on which to interpolate.
        y: ndarray
            the y-coordinate of points on which to interpolate.
        z: ndarray
            the z-coordinate of points on which to interpolate.
        domain_manager: DomainManager
            An optional Domain manager for periodic domains.
        equations: sequence
            A sequence of equations or groups.  Defaults to None.  This is
            used only if the default interpolation equations are inadequate.
        """
        self._set_particle_arrays(particle_arrays)
        bounds = get_bounding_box(self.particle_arrays)
        shape = get_nx_ny_nz(num_points, bounds)
        self.dim = 3 - list(shape).count(1)

        if kernel is None:
            self.kernel = Gaussian(dim=self.dim)
        else:
            self.kernel = kernel

        self.pa = None
        self.nnps = None
        self.equations = equations
        self.func_eval = None
        self.domain_manager = domain_manager
        if x is None and y is None and z is None:
            self.set_domain(bounds, shape)
        else:
            self.set_interpolation_points(x=x, y=y, z=z)

    #### Interpolator protocol ################################################
    def set_interpolation_points(self, x=None, y=None, z=None):
        """Set the points on which we must interpolate the arrays.

        If any of x, y, z is not passed it is assumed to be 0.0 and shaped
        like the other non-None arrays.


        Parameters
        ----------

        x: ndarray
            the x-coordinate of points on which to interpolate.
        y: ndarray
            the y-coordinate of points on which to interpolate.
        z: ndarray
            the z-coordinate of points on which to interpolate.

        """
        tmp = None
        for tmp in (x, y, z):
            if tmp is not None:
                break
        if tmp is None:
            raise RuntimeError('At least one non-None array must be given.')

        def _get_array(_t):
            return np.asarray(_t) if _t is not None else np.zeros_like(tmp)

        x, y, z = _get_array(x), _get_array(y), _get_array(z)

        self.shape = x.shape
        self.pa = self._create_particle_array(x, y, z)
        arrays = self.particle_arrays + [self.pa]

        if self.func_eval is None:
            self._compile_acceleration_eval(arrays)

        self.update_particle_arrays(self.particle_arrays)

    def set_domain(self, bounds, shape):
        """Set the domain to interpolate into.

        Parameters
        ----------

        bounds: tuple
            (xmin, xmax, ymin, ymax, zmin, zmax)
        shape: tuple
            (nx, ny, nz)
        """
        self.bounds = np.asarray(bounds)
        self.shape = np.asarray(shape)
        x, y, z = self._create_default_points(self.bounds, self.shape)
        self.set_interpolation_points(x, y, z)

    def interpolate(self, prop, gradient=False):
        """Interpolate given property.

        Parameters
        ----------

        prop: str
            The name of the property to interpolate.

        gradient: bool
            Evaluate gradient and not function.

        Returns
        -------
        A numpy array suitably shaped with the property interpolated.
        """
        for array in self.particle_arrays:
            data = array.get(prop, only_real_particles=False)
            array.get('temp_prop', only_real_particles=False)[:] = data

        self.func_eval.compute(0.0, 0.1)  # These are junk arguments.
        result = self.pa.prop.copy()
        result.shape = self.shape
        return result.squeeze()

    def update_particle_arrays(self, particle_arrays):
        """Call this for a new set of particle arrays which have the
        same properties as before.

        For example, if you are reading the particle array data from files,
        each time you load a new file a new particle array is read with the
        same properties.  Call this function to reset the arrays.
        """
        self._set_particle_arrays(particle_arrays)
        arrays = self.particle_arrays + [self.pa]
        self._create_nnps(arrays)
        self.func_eval.update_particle_arrays(arrays)

    #### Private protocol #####################################################

    def _create_nnps(self, arrays):
        # create the neighbor locator object
        self.nnps = NNPS(dim=self.kernel.dim,
                         particles=arrays,
                         radius_scale=self.kernel.radius_scale,
                         domain=self.domain_manager,
                         cache=True)
        self.nnps.update()
        self.func_eval.set_nnps(self.nnps)

    def _create_default_points(self, bounds, shape):
        b = bounds
        n = shape
        x, y, z = np.mgrid[b[0]:b[1]:n[0] * 1j, b[2]:b[3]:n[1] * 1j,
                           b[4]:b[5]:n[2] * 1j, ]
        return x, y, z

    def _create_particle_array(self, x, y, z):
        xr = x.ravel()
        yr = y.ravel()
        zr = z.ravel()
        self.x, self.y, self.z = x.squeeze(), y.squeeze(), z.squeeze()

        hmax = self._get_max_h_in_arrays()
        h = hmax * np.ones_like(xr)
        prop = np.zeros_like(xr)
        pa = get_particle_array(name='interpolate',
                                x=xr,
                                y=yr,
                                z=zr,
                                h=h,
                                number_density=np.zeros_like(xr),
                                prop=prop,
                                grad_x=np.zeros_like(xr),
                                grad_y=np.zeros_like(xr),
                                grad_z=np.zeros_like(xr))
        return pa

    def _compile_acceleration_eval(self, arrays):
        names = [x.name for x in self.particle_arrays]
        if self.equations is None:
            equations = [
                InterpolateFunction(dest='interpolate', sources=names)
            ]
        else:
            equations = self.equations
        self.func_eval = AccelerationEval(arrays, equations, self.kernel)
        compiler = SPHCompiler(self.func_eval, None)
        compiler.compile()

    def _get_max_h_in_arrays(self):
        hmax = -1.0
        for array in self.particle_arrays:
            hmax = max(array.h.max(), hmax)
        return hmax

    def _set_particle_arrays(self, particle_arrays):
        self.particle_arrays = particle_arrays
        self._make_all_arrays_have_same_props(particle_arrays)
        for array in self.particle_arrays:
            if 'temp_prop' not in array.properties:
                array.add_property('temp_prop')

    def _make_all_arrays_have_same_props(self, particle_arrays):
        """Make sure all arrays have the same props.
        """
        all_props = reduce(set.union,
                           [set(x.properties.keys()) for x in particle_arrays])
        for array in particle_arrays:
            all_props.update(array.properties.keys())

        for array in particle_arrays:
            array_props = set(array.properties.keys())
            for prop in (all_props - array_props):
                array.add_property(prop)
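A brief usage sketch for the interpolator above; fluid and fluid_new are assumed to be existing particle arrays that carry a pressure property p:

interp = Interpolator([fluid], num_points=10000)
p_grid = interp.interpolate('p')     # pressure on the internal uniform grid
x_grid, y_grid = interp.x, interp.y  # grid coordinates, handy for plotting

# Reuse the same interpolator for another snapshot with identical properties.
interp.update_particle_arrays([fluid_new])
p_grid_new = interp.interpolate('p')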
Example #23
 def _create_nnps(self, arrays):
     # create the neighbor locator object
     self.nnps = NNPS(dim=self.kernel.dim, particles=arrays,
                      radius_scale=self.kernel.radius_scale)
     self.nnps.update()
     self.func_eval.set_nnps(self.nnps)
Example #24
 def setUp(self):
     PeriodicChannel2DTestCase.setUp(self)
     self.nnps = LinkedListNNPS(dim=2,
                                particles=self.particles,
                                domain=self.domain,
                                radius_scale=self.kernel.radius_scale)