Example #1
0
    def create_ghost_particles(self, particles, mesh, domain, load_balance, comm, iteration=6):
        """Create initial ghost particles that hug the boundary after
        load balance
        """
        rank = comm.Get_rank()
        size = comm.Get_size()

        # remove current (if any) ghost particles
        particles.remove_tagged_particles(ParticleTAGS.Ghost)
        current_size = particles.get_number_of_particles()

        # create initial ghost particles, particles is now larger
        # these particles are centered in neighboring boundary leaf
        # cells of the octree
        load_balance.create_boundary_particles(particles, rank)

        # reorder ghost in processors order: exterior have a process id of -1 so
        # their put before interior ghost particles
        ghost_proc = np.array(particles["process"][current_size:])
        ind = np.argsort(ghost_proc)
        ghost_proc = ghost_proc[ind]

        for field in particles.properties.keys():
            array = particles[field][current_size:]
            array[:] = array[ind]

        # allocate arrays for boundary indices
        indices = LongArray()
        corner_ghost = ParticleContainer()

        # sides
        boundary_indices = {
                "left"   : LongArray(),
                "right"  : LongArray(),
                "bottom" : LongArray(),
                "top"    : LongArray(),
                "left-top"     : LongArray(),
                "left-bottom"  : LongArray(),
                "right-top"    : LongArray(),
                "right-bottom" : LongArray()
                }

        send_particles = np.zeros(size, dtype=np.int32)
        recv_particles = np.zeros(size, dtype=np.int32)

        # create ghost interior and exterior particles by iteration, using
        # the mesh to extract the needed neighbors
        for i in range(iteration):

            # build the mesh
            mesh.tessellate()
            cumsum_neighbors = mesh["number of neighbors"].cumsum()

            #---------- create exterior ghost particles ----------#

            # create indices for ghost particles
            ghost_indices = np.arange(current_size, particles.get_number_of_particles())

            # label current ghost as old ghost
            particles['tag'][ghost_indices] = ParticleTAGS.OldGhost

            # select exterior ghost particles
            exterior_ghost = ghost_proc == -1
            exterior_ghost_indices = ghost_indices[exterior_ghost]

            if np.sum(exterior_ghost_indices) > 0:

                num_exterior_ghost = create_reflect_ghost(particles, boundary_indices,
                        domain, exterior_ghost_indices, ghost_indices,
                        mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, -1)

            #---------- create interior ghost particles ----------#
            interior_ghost_indices = ghost_indices[~exterior_ghost]
            interior_ghost_proc = ghost_proc[~exterior_ghost]

            # bin processors - they are in order
            interior_ghost_proc_bin = np.bincount(interior_ghost_proc, minlength=size)

            send_particles[:] = 0
            recv_particles[:] = 0
            indices.reset()

            # collect the indices of particles to be export to each process
            cumsum_proc = interior_ghost_proc_bin.cumsum()
            for proc in range(size):
                if interior_ghost_proc_bin[proc] != 0:

                    start = cumsum_proc[proc] - interior_ghost_proc_bin[proc]
                    end   = cumsum_proc[proc]

                    send_particles[proc] = find_boundary_particles(indices, interior_ghost_indices[start:end], ghost_indices,
                            mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, False)

            # extract data to send and remove the particles
            send_data = {}
            for prop in particles.properties.keys():
                send_data[prop] = np.ascontiguousarray(particles[prop][indices.get_npy_array()])
            send_data["tag"][:] = ParticleTAGS.Ghost

            # how many particles are being sent from each process
            comm.Alltoall(sendbuf=send_particles, recvbuf=recv_particles)
            num_interior_ghost = np.sum(recv_particles)

            # resize arrays to give room for incoming particles
            sp = particles.get_number_of_particles()
            #particles.resize(current_size + num_exterior_ghost + num_interior_ghost)
            particles.extend(num_interior_ghost)

            exchange_particles(particles, send_data, send_particles, recv_particles,
                    sp, comm)

            #---------- create exterior corner ghost particles ----------#
            indices.reset()
            send_particles[:] = 0
            recv_particles[:] = 0

            # clear out corner ghost
            corner_ghost.resize(0)

            if boundary_indices['left'].length > 0:
                export_reflect(particles, corner_ghost, boundary_indices["left"], indices, send_particles, 'x', domain.xmin, ghost_indices,
                        mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

            if boundary_indices['right'].length > 0:
                export_reflect(particles, corner_ghost, boundary_indices["right"], indices, send_particles, 'x', domain.xmax, ghost_indices,
                        mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

            if boundary_indices['bottom'].length > 0:
                export_reflect(particles, corner_ghost, boundary_indices["bottom"], indices, send_particles, 'y', domain.ymin, ghost_indices,
                        mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

            if boundary_indices['top'].length > 0:
                export_reflect(particles, corner_ghost, boundary_indices["top"], indices, send_particles, 'y', domain.ymax, ghost_indices,
                        mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

            #print rank, current_size, particles.get_number_of_particles(), current_size + num_exterior_ghost + num_interior_ghost

            sp = particles.get_number_of_particles()
            comm.Alltoall(sendbuf=send_particles, recvbuf=recv_particles)
            particles.extend(np.sum(recv_particles))

            # to export corners have to be reorderd by process
            if corner_ghost.get_number_of_particles() > 0:

                ind = np.argsort(corner_ghost['process'])
                for field in corner_ghost.properties.keys():
                    array = corner_ghost[field]
                    array[:] = array[ind]

                corner_ghost["process"][:] = -1

            # exchange patch corners
            exchange_particles(particles, corner_ghost, send_particles, recv_particles,
                    sp, comm)

            for bd in boundary_indices:
                boundary_indices[bd].reset()

            particles.remove_tagged_particles(ParticleTAGS.OldGhost)

            # put particles in process order for next loop
            ind = np.argsort(particles["process"][current_size:])
            for field in particles.properties.keys():
                array = particles[field][current_size:]
                array[:] = array[ind]

            ghost_proc = np.array(particles["process"][current_size:])

        print 'rank:', rank, 'fraction of real to ghost:', (particles.get_number_of_particles()-current_size)*1.0/particles.get_number_of_particles()
Example #2
0
    def update_ghost_particles(self, particles, mesh, domain, load_balance, comm):
        """Rebuild the ghost particle set after real particles have moved.

        Particles that left the local domain are migrated to their new
        owning process, then exterior ghosts are recreated by reflection
        across the domain walls and interior ghosts are re-exported to
        neighboring processes.  Old ghosts from the previous step are
        removed at the end and real particles are packed to the front.

        Parameters
        ----------
        particles : particle container holding the local particles
        mesh : tessellation object; provides 'neighbors' and
            'number of neighbors' arrays
        domain : domain limits (xmin, xmax, ymin, ymax)
        load_balance : load balance object holding the global octree
        comm : MPI communicator
        """
        rank = comm.Get_rank()
        size = comm.Get_size()

        # allocate arrays for boundary indices
        indices = LongArray()
        corner_ghost = ParticleContainer()

        send_particles = np.zeros(size, dtype=np.int32)
        recv_particles = np.zeros(size, dtype=np.int32)

        # we are putting new ghost at the end of the array
        current_size = particles.get_number_of_particles()

        boundary_indices = {
                "left"   : LongArray(),
                "right"  : LongArray(),
                "bottom" : LongArray(),
                "top"    : LongArray(),
                "left-top"     : LongArray(),
                "left-bottom"  : LongArray(),
                "right-top"    : LongArray(),
                "right-bottom" : LongArray()
                }

        # relabel all particles
        particles["tag"][:]  = ParticleTAGS.Undefined
        particles["type"][:] = ParticleTAGS.Undefined

        # flag particles that have left the domain and particles
        # that remained
        load_balance.flag_migrate_particles(particles, rank)

        # find particles that have left the domain
        export_indices = np.where(particles["type"] == ParticleTAGS.ExportInterior)[0]

        if export_indices.size > 0:

            # extract export particles
            export_particles = particles.extract_items(export_indices)

            # put particles in process order
            ind = np.argsort(export_particles["process"])
            for field in export_particles.properties.keys():
                array = export_particles[field]
                array[:] = array[ind]

            # exported particles become real particles on the receiver
            export_particles["tag"][:]  = ParticleTAGS.Real
            export_particles["type"][:] = ParticleTAGS.Undefined

        else:
            export_particles = ParticleContainer()

        # bin particle process (empty container gives all zeros)
        send_particles[:] = np.bincount(export_particles["process"], minlength=size)

        # how many particles are being sent from each process
        comm.Alltoall(sendbuf=send_particles, recvbuf=recv_particles)

        # create container for incoming particles
        import_particles = ParticleContainer(np.sum(recv_particles))

        exchange_particles(import_particles, export_particles, send_particles, recv_particles,
                0, comm)

        # copy import particle data to ghost place holders and turn to real particles
        migrate.transfer_migrate_particles(particles, import_particles)

        # flag export particles back to interior ghost particles
        particles["type"][export_indices] = ParticleTAGS.Interior

        ghost_indices = np.where(particles["tag"] == ParticleTAGS.OldGhost)[0]

        # find indices of interior/exterior ghost particles
        cumsum_neighbors = mesh["number of neighbors"].cumsum()
        exterior_ghost_indices = np.where(particles["type"] == ParticleTAGS.Exterior)[0]
        interior_ghost_indices = np.where(particles["type"] == ParticleTAGS.Interior)[0]

        #---------- create exterior ghost particles ----------#
        num_exterior_ghost = 0
        if exterior_ghost_indices.size > 0:

            num_exterior_ghost = create_reflect_ghost(particles, boundary_indices,
                    domain, exterior_ghost_indices, ghost_indices,
                    mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, -1)

        #---------- create interior ghost particles ----------#
        send_particles[:] = 0
        recv_particles[:] = 0
        interior_ghost_proc = particles["process"][interior_ghost_indices]

        # arrange particles in process order
        ind = interior_ghost_proc.argsort()
        interior_ghost_proc = interior_ghost_proc[ind]
        interior_ghost_indices = interior_ghost_indices[ind]

        # bin processors
        interior_ghost_proc_bin = np.bincount(interior_ghost_proc, minlength=size)

        # NOTE: cumsum_neighbors computed above is still valid here; the
        # previous duplicate recomputation was removed.

        # collect the indices of particles to be exported to each process
        cumsum_proc = interior_ghost_proc_bin.cumsum()
        for proc in range(size):
            if interior_ghost_proc_bin[proc] != 0:

                start = cumsum_proc[proc] - interior_ghost_proc_bin[proc]
                end   = cumsum_proc[proc]

                send_particles[proc] = find_boundary_particles(indices, interior_ghost_indices[start:end], ghost_indices,
                        mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, False)

        # extract data to send and remove the particles
        send_data = {}
        for prop in particles.properties.keys():
            send_data[prop] = np.ascontiguousarray(particles[prop][indices.get_npy_array()])
        send_data["tag"][:] = ParticleTAGS.Ghost

        # how many particles are being sent from each process
        comm.Alltoall(sendbuf=send_particles, recvbuf=recv_particles)
        num_interior_ghost = np.sum(recv_particles)

        # resize arrays to give room for incoming particles
        sp = particles.get_number_of_particles()
        particles.extend(num_interior_ghost)

        exchange_particles(particles, send_data, send_particles, recv_particles,
                sp, comm)

        #---------- create exterior corner ghost particles ----------#
        indices.reset()
        send_particles[:] = 0
        recv_particles[:] = 0

        if boundary_indices['left'].length > 0:
            export_reflect(particles, corner_ghost, boundary_indices["left"], indices, send_particles, 'x', domain.xmin, ghost_indices,
                    mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

        if boundary_indices['right'].length > 0:
            export_reflect(particles, corner_ghost, boundary_indices["right"], indices, send_particles, 'x', domain.xmax, ghost_indices,
                    mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

        if boundary_indices['bottom'].length > 0:
            export_reflect(particles, corner_ghost, boundary_indices["bottom"], indices, send_particles, 'y', domain.ymin, ghost_indices,
                    mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

        if boundary_indices['top'].length > 0:
            export_reflect(particles, corner_ghost, boundary_indices["top"], indices, send_particles, 'y', domain.ymax, ghost_indices,
                    mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

        comm.Alltoall(sendbuf=send_particles, recvbuf=recv_particles)
        sp = particles.get_number_of_particles()
        particles.extend(np.sum(recv_particles))

        # corners to export have to be reordered by process
        if corner_ghost.get_number_of_particles() > 0:

            ind = np.argsort(corner_ghost['process'])
            for field in corner_ghost.properties.keys():
                array = corner_ghost[field]
                array[:] = array[ind]

            # exported corners become exterior ghosts on the receiver
            corner_ghost["process"][:] = -1

        # exchange patch corners
        exchange_particles(particles, corner_ghost, send_particles, recv_particles,
                sp, comm)

        # finally remove old ghost particles from previous time step
        # and also put real particles in front and ghost in the back
        particles.remove_tagged_particles(ParticleTAGS.OldGhost)
        particles.align_particles()

        particles['type'][:] = ParticleTAGS.Undefined
Example #3
0
# build a 3D domain and decompose the particles with a hilbert load balance
dom = DomainLimits(dim=3, xmin=0., xmax=4.)
order = 3
load_b = LoadBalance3D(pa, dom, comm=comm, factor=1.0, order=order)
load_b.decomposition()

# every particle must have been assigned a key
assert(load_b.keys.size == num_particles)

# each hilbert key must lie in [0, 2**(order*dim))
dim = 3
total_keys = 1 << (order*dim)
for key in load_b.keys:
    assert(0 <= key < total_keys)

# all particles must be accounted for when building the global tree
assert(load_b.global_num_real_particles == 125)

# the work is the particle count per leaf, so summing over all
# leaves must recover the total number of particles
assert(np.sum(load_b.global_work) == 125)

# after decomposition the container holds only real particles
assert(pa.num_real_particles == pa.get_number_of_particles())

# positions must match the original coordinates looked up by global id
for i in range(pa.get_number_of_particles()):
    gid = pa['gid'][i]
    assert(abs(X[gid] - pa['position-x'][i]) < 1e-15)
    assert(abs(Y[gid] - pa['position-y'][i]) < 1e-15)
    assert(abs(Z[gid] - pa['position-z'][i]) < 1e-15)
    assert(GID[gid] == gid)