Example #1
    def __init__(self, particles):
        super(VoronoiMesh2D, self).__init__(particles)

        face_vars = {
            "area": "double",
            "velocity-x": "double",
            "velocity-y": "double",
            "normal-x": "double",
            "normal-y": "double",
            "com-x": "double",
            "com-y": "double",
            "pair-i": "longlong",
            "pair-j": "longlong",
        }
        self.faces = ParticleContainer(var_dict=face_vars)
Example #2
    def __init__(self, *arg, **kw):
        super(VoronoiMesh2D, self).__init__(*arg, **kw)

        self.dim = 2
        self["neighbors"] = None
        self["number of neighbors"] = None
        self["faces"] = None
        self["voronoi vertices"] = None

        face_vars = {
            "area": "double",
            "velocity-x": "double",
            "velocity-y": "double",
            "normal-x": "double",
            "normal-y": "double",
            "com-x": "double",
            "com-y": "double",
            "pair-i": "longlong",
            "pair-j": "longlong",
        }
        self.faces = ParticleContainer(var_dict=face_vars)
Example #3
class VoronoiMesh2D(VoronoiMeshBase):
    """
    2d voronoi mesh class
    """
    def __init__(self, *arg, **kw):
        super(VoronoiMesh2D, self).__init__(*arg, **kw)

        self.dim = 2
        self["neighbors"] = None
        self["number of neighbors"] = None
        self["faces"] = None
        self["voronoi vertices"] = None

        face_vars = {
            "area": "double",
            "velocity-x": "double",
            "velocity-y": "double",
            "normal-x": "double",
            "normal-y": "double",
            "com-x": "double",
            "com-y": "double",
            "pair-i": "longlong",
            "pair-j": "longlong",
        }
        self.faces = ParticleContainer(var_dict=face_vars)

    def compute_cell_info(self, particles):
        """
        compute the volume and center of mass of all real particles, and the number
        of faces plus the area, center of mass, normal, and particle pair of each face
        """

        num_faces = mesh.number_of_faces(particles, self["neighbors"],
                                         self["number of neighbors"])
        self.faces.resize(num_faces)

        vol = particles["volume"]
        xcom = particles["com-x"]
        ycom = particles["com-y"]
        vol[:] = 0.0
        xcom[:] = 0.0
        ycom[:] = 0.0

        mesh.cell_face_info_2d(particles, self.faces, self["neighbors"],
                               self["number of neighbors"], self["faces"],
                               self["voronoi vertices"])

    def tessellate(self, particles):
        """
        create 2d voronoi tessellation from particle positions
        """
        # create the tessellation
        vor = Voronoi(particles.T)

        # total number of particles
        num_particles = particles.shape[1]

        # create neighbor and face graph
        neighbor_graph = [[] for i in range(num_particles)]
        face_graph = [[] for i in range(num_particles)]

        # loop through each face collecting the two particles
        # that made that face as well as the face itself
        for i, face in enumerate(vor.ridge_points):

            p1, p2 = face
            neighbor_graph[p1].append(p2)
            neighbor_graph[p2].append(p1)

            face_graph[p1] += vor.ridge_vertices[i]
            face_graph[p2] += vor.ridge_vertices[i]

        # sizes for 1d graphs
        neighbor_graph_sizes = np.array([len(n) for n in neighbor_graph],
                                        dtype=np.int32)

        # graphs in 1d
        neighbor_graph = np.array(list(
            itertools.chain.from_iterable(neighbor_graph)),
                                  dtype=np.int32)
        face_graph = np.array(list(itertools.chain.from_iterable(face_graph)),
                              dtype=np.int32)

        self["neighbors"] = neighbor_graph
        self["number of neighbors"] = neighbor_graph_sizes
        self["faces"] = face_graph
        self["voronoi vertices"] = vor.vertices
Example #4
if rank == 2:
    num_particles = 3
    x = np.array([4.0, 3.0, 0.0], dtype=np.float64)
    y = np.array([3.0, 3.0, 4.0], dtype=np.float64)

    gid = np.array([18, 19, 20], dtype=np.int32)

if rank == 3:
    num_particles = 4
    x = np.array([1.0, 2.0, 3.0, 4.0], dtype=np.float64)
    y = np.array([4.0, 4.0, 4.0, 4.0], dtype=np.float64)

    gid = np.array([21, 22, 23, 24], dtype=np.int32)

# create particle data structure
pc = ParticleContainer(num_particles)
pc['position-x'][:] = x
pc['position-y'][:] = y

pc.register_property(x.size, 'gid', 'long')
pc['gid'][:] = gid

# Gather the global data on root
X = np.zeros(shape=25, dtype=np.float64)
Y = np.zeros(shape=25, dtype=np.float64)
GID = np.zeros(shape=25, dtype=np.int32)

# number of particles contributed by each rank (used as the Gatherv receive counts)
displacements = np.array([12, 6, 3, 4], dtype=np.int32)

comm.Gatherv(sendbuf=x, recvbuf=[X, (displacements, None)], root=0)
comm.Gatherv(sendbuf=y, recvbuf=[Y, (displacements, None)], root=0)
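
In mpi4py's Gatherv the tuple in recvbuf is (counts, displacements): counts[p] is the number of elements received from rank p, and with displacements left as None the offsets default to the running sum of counts. A small self-contained sketch of the same pattern (illustrative only, not part of the example above):

from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
rank = comm.Get_rank()

# each rank contributes a different number of values
local = np.arange(rank + 1, dtype=np.float64)
counts = np.array(comm.allgather(local.size), dtype=np.int32)

if rank == 0:
    gathered = np.empty(counts.sum(), dtype=np.float64)
    comm.Gatherv(sendbuf=local, recvbuf=[gathered, (counts, None)], root=0)
else:
    comm.Gatherv(sendbuf=local, recvbuf=None, root=0)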
Example #5
    def update_ghost_particles(self, particles, mesh, domain, load_balance, comm):

        rank = comm.Get_rank()
        size = comm.Get_size()

        # allocate arrays for boundary indices
        indices = LongArray()
        corner_ghost = ParticleContainer()

        send_particles = np.zeros(size, dtype=np.int32)
        recv_particles = np.zeros(size, dtype=np.int32)

        # we are putting new ghosts at the end of the array
        current_size = particles.get_number_of_particles()

        boundary_indices = {
                "left"   : LongArray(),
                "right"  : LongArray(),
                "bottom" : LongArray(),
                "top"    : LongArray(),
                "left-top"     : LongArray(),
                "left-bottom"  : LongArray(),
                "right-top"    : LongArray(),
                "right-bottom" : LongArray()
                }

        # relabel all particles
        particles["tag"][:]  = ParticleTAGS.Undefined
        particles["type"][:] = ParticleTAGS.Undefined

        # flag particles that have left the domain and particles
        # that remained
        load_balance.flag_migrate_particles(particles, rank)

        # find particles that have left the domain
        export_indices = np.where(particles["type"] == ParticleTAGS.ExportInterior)[0]

        if export_indices.size > 0:

            # extract export particles 
            export_particles = particles.extract_items(export_indices)

            # put particles in process order
            ind = np.argsort(export_particles["process"])
            for field in export_particles.properties.keys():
                array = export_particles[field]
                array[:] = array[ind]

            export_particles["tag"][:]  = ParticleTAGS.Real
            export_particles["type"][:] = ParticleTAGS.Undefined

        else:
            export_particles = ParticleContainer()

        # bin export particles by destination process
        send_particles[:] = np.bincount(export_particles["process"], minlength=size)

        # how many particles are being sent from each process
        comm.Alltoall(sendbuf=send_particles, recvbuf=recv_particles)

        # create container for incoming particles 
        import_particles = ParticleContainer(np.sum(recv_particles))

        exchange_particles(import_particles, export_particles, send_particles, recv_particles,
                0, comm)

        # copy import particle data to ghost placeholders and turn them into real particles
        migrate.transfer_migrate_particles(particles, import_particles)

        # flag export particles back to interior ghost particles
        particles["type"][export_indices] = ParticleTAGS.Interior

        ghost_indices = np.where(particles["tag"] == ParticleTAGS.OldGhost)[0]

        # find indices of interior/exterior ghost particles 
        cumsum_neighbors = mesh["number of neighbors"].cumsum()
        exterior_ghost_indices = np.where(particles["type"] == ParticleTAGS.Exterior)[0]
        interior_ghost_indices = np.where(particles["type"] == ParticleTAGS.Interior)[0]

        #---------- create exterior ghost particles ----------#
        num_exterior_ghost = 0
        if exterior_ghost_indices.size > 0:

            num_exterior_ghost = create_reflect_ghost(particles, boundary_indices,
                    domain, exterior_ghost_indices, ghost_indices,
                    mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, -1)

        #---------- create interior ghost particles ----------#
        send_particles[:] = 0
        recv_particles[:] = 0
        interior_ghost_proc = particles["process"][interior_ghost_indices]

        # arrange particles in process order
        ind = interior_ghost_proc.argsort()
        interior_ghost_proc = interior_ghost_proc[ind]
        interior_ghost_indices = interior_ghost_indices[ind]

        # bin processors
        interior_ghost_proc_bin = np.bincount(interior_ghost_proc, minlength=size)

        cumsum_neighbors = mesh["number of neighbors"].cumsum()

        # collect the indices of particles to be exported to each process
        cumsum_proc = interior_ghost_proc_bin.cumsum()
        for proc in range(size):
            if interior_ghost_proc_bin[proc] != 0:

                start = cumsum_proc[proc] - interior_ghost_proc_bin[proc]
                end   = cumsum_proc[proc]

                send_particles[proc] = find_boundary_particles(indices, interior_ghost_indices[start:end], ghost_indices,
                        mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, False)

        # extract data to send and remove the particles
        send_data = {}
        for prop in particles.properties.keys():
            send_data[prop] = np.ascontiguousarray(particles[prop][indices.get_npy_array()])
        send_data["tag"][:] = ParticleTAGS.Ghost

        # how many particles are being sent from each process
        comm.Alltoall(sendbuf=send_particles, recvbuf=recv_particles)
        num_interior_ghost = np.sum(recv_particles)

        # resize arrays to give room for incoming particles
        sp = particles.get_number_of_particles()
        particles.extend(num_interior_ghost)

        exchange_particles(particles, send_data, send_particles, recv_particles,
                sp, comm)

        #---------- create exterior corner ghost particles ----------#
        indices.reset()
        send_particles[:] = 0
        recv_particles[:] = 0

        if boundary_indices['left'].length > 0:
            export_reflect(particles, corner_ghost, boundary_indices["left"], indices, send_particles, 'x', domain.xmin, ghost_indices,
                    mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

        if boundary_indices['right'].length > 0:
            export_reflect(particles, corner_ghost, boundary_indices["right"], indices, send_particles, 'x', domain.xmax, ghost_indices,
                    mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

        if boundary_indices['bottom'].length > 0:
            export_reflect(particles, corner_ghost, boundary_indices["bottom"], indices, send_particles, 'y', domain.ymin, ghost_indices,
                    mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

        if boundary_indices['top'].length > 0:
            export_reflect(particles, corner_ghost, boundary_indices["top"], indices, send_particles, 'y', domain.ymax, ghost_indices,
                    mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

        comm.Alltoall(sendbuf=send_particles, recvbuf=recv_particles)
        sp = particles.get_number_of_particles()
        particles.extend(np.sum(recv_particles))

        # corners to be exported have to be reordered by process
        if corner_ghost.get_number_of_particles() > 0:

            ind = np.argsort(corner_ghost['process'])
            for field in corner_ghost.properties.keys():
                array = corner_ghost[field]
                array[:] = array[ind]

            corner_ghost["process"][:] = -1

        # exchange patch corners
        exchange_particles(particles, corner_ghost, send_particles, recv_particles,
                sp, comm)

        # finally remove old ghost particles from previous time step
        # and also put real particles in front and ghost in the back
        particles.remove_tagged_particles(ParticleTAGS.OldGhost)
        particles.align_particles()

        particles['type'][:] = ParticleTAGS.Undefined
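
The send/receive bookkeeping above follows a standard MPI pattern: bin the outgoing particles by destination rank, exchange the per-rank counts with Alltoall, resize the receive side, then exchange the payload. A minimal standalone illustration of the counts exchange (this is the generic idiom, not the project's exchange_particles helper):

from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()

# number of particles this rank intends to send to every other rank
send_counts = np.zeros(size, dtype=np.int32)
send_counts[(rank + 1) % size] = 5        # e.g. 5 particles to the next rank

recv_counts = np.zeros(size, dtype=np.int32)
comm.Alltoall(sendbuf=send_counts, recvbuf=recv_counts)

# recv_counts[p] now holds how many particles rank p will send to this rank,
# so the receive buffers can be sized before the actual payload exchange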
Example #6
    def create_ghost_particles(self, particles, mesh, domain, load_balance, comm, iteration=6):
        """Create initial ghost particles that hug the boundary after
        load balance
        """
        rank = comm.Get_rank()
        size = comm.Get_size()

        # remove current (if any) ghost particles
        particles.remove_tagged_particles(ParticleTAGS.Ghost)
        current_size = particles.get_number_of_particles()

        # create initial ghost particles; the particles container is now
        # larger. These particles are centered in neighboring boundary leaf
        # cells of the octree
        load_balance.create_boundary_particles(particles, rank)

        # reorder ghosts in process order: exterior ghosts have a process id of -1,
        # so they are put before interior ghost particles
        ghost_proc = np.array(particles["process"][current_size:])
        ind = np.argsort(ghost_proc)
        ghost_proc = ghost_proc[ind]

        for field in particles.properties.keys():
            array = particles[field][current_size:]
            array[:] = array[ind]

        # allocate arrays for boundary indices
        indices = LongArray()
        corner_ghost = ParticleContainer()

        # sides
        boundary_indices = {
                "left"   : LongArray(),
                "right"  : LongArray(),
                "bottom" : LongArray(),
                "top"    : LongArray(),
                "left-top"     : LongArray(),
                "left-bottom"  : LongArray(),
                "right-top"    : LongArray(),
                "right-bottom" : LongArray()
                }

        send_particles = np.zeros(size, dtype=np.int32)
        recv_particles = np.zeros(size, dtype=np.int32)

        # create interior and exterior ghost particles iteratively, using
        # the mesh to extract the needed neighbors
        for i in range(iteration):

            # build the mesh
            mesh.tessellate()
            cumsum_neighbors = mesh["number of neighbors"].cumsum()

            #---------- create exterior ghost particles ----------#

            # create indices for ghost particles
            ghost_indices = np.arange(current_size, particles.get_number_of_particles())

            # label current ghosts as old ghosts
            particles['tag'][ghost_indices] = ParticleTAGS.OldGhost

            # select exterior ghost particles
            exterior_ghost = ghost_proc == -1
            exterior_ghost_indices = ghost_indices[exterior_ghost]

            if exterior_ghost_indices.size > 0:

                num_exterior_ghost = create_reflect_ghost(particles, boundary_indices,
                        domain, exterior_ghost_indices, ghost_indices,
                        mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, -1)

            #---------- create interior ghost particles ----------#
            interior_ghost_indices = ghost_indices[~exterior_ghost]
            interior_ghost_proc = ghost_proc[~exterior_ghost]

            # bin processors - they are in order
            interior_ghost_proc_bin = np.bincount(interior_ghost_proc, minlength=size)

            send_particles[:] = 0
            recv_particles[:] = 0
            indices.reset()

            # collect the indices of particles to be exported to each process
            cumsum_proc = interior_ghost_proc_bin.cumsum()
            for proc in range(size):
                if interior_ghost_proc_bin[proc] != 0:

                    start = cumsum_proc[proc] - interior_ghost_proc_bin[proc]
                    end   = cumsum_proc[proc]

                    send_particles[proc] = find_boundary_particles(indices, interior_ghost_indices[start:end], ghost_indices,
                            mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, False)

            # extract data to send and remove the particles
            send_data = {}
            for prop in particles.properties.keys():
                send_data[prop] = np.ascontiguousarray(particles[prop][indices.get_npy_array()])
            send_data["tag"][:] = ParticleTAGS.Ghost

            # how many particles are being sent from each process
            comm.Alltoall(sendbuf=send_particles, recvbuf=recv_particles)
            num_interior_ghost = np.sum(recv_particles)

            # resize arrays to give room for incoming particles
            sp = particles.get_number_of_particles()
            #particles.resize(current_size + num_exterior_ghost + num_interior_ghost)
            particles.extend(num_interior_ghost)

            exchange_particles(particles, send_data, send_particles, recv_particles,
                    sp, comm)

            #---------- create exterior corner ghost particles ----------#
            indices.reset()
            send_particles[:] = 0
            recv_particles[:] = 0

            # clear out corner ghost
            corner_ghost.resize(0)

            if boundary_indices['left'].length > 0:
                export_reflect(particles, corner_ghost, boundary_indices["left"], indices, send_particles, 'x', domain.xmin, ghost_indices,
                        mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

            if boundary_indices['right'].length > 0:
                export_reflect(particles, corner_ghost, boundary_indices["right"], indices, send_particles, 'x', domain.xmax, ghost_indices,
                        mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

            if boundary_indices['bottom'].length > 0:
                export_reflect(particles, corner_ghost, boundary_indices["bottom"], indices, send_particles, 'y', domain.ymin, ghost_indices,
                        mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

            if boundary_indices['top'].length > 0:
                export_reflect(particles, corner_ghost, boundary_indices["top"], indices, send_particles, 'y', domain.ymax, ghost_indices,
                        mesh['neighbors'], mesh['number of neighbors'], cumsum_neighbors, load_balance, current_size, rank, size)

            #print rank, current_size, particles.get_number_of_particles(), current_size + num_exterior_ghost + num_interior_ghost

            sp = particles.get_number_of_particles()
            comm.Alltoall(sendbuf=send_particles, recvbuf=recv_particles)
            particles.extend(np.sum(recv_particles))

            # corners to be exported have to be reordered by process
            if corner_ghost.get_number_of_particles() > 0:

                ind = np.argsort(corner_ghost['process'])
                for field in corner_ghost.properties.keys():
                    array = corner_ghost[field]
                    array[:] = array[ind]

                corner_ghost["process"][:] = -1

            # exchange patch corners
            exchange_particles(particles, corner_ghost, send_particles, recv_particles,
                    sp, comm)

            for bd in boundary_indices:
                boundary_indices[bd].reset()

            particles.remove_tagged_particles(ParticleTAGS.OldGhost)

            # put particles in process order for next loop
            ind = np.argsort(particles["process"][current_size:])
            for field in particles.properties.keys():
                array = particles[field][current_size:]
                array[:] = array[ind]

            ghost_proc = np.array(particles["process"][current_size:])

        print('rank:', rank, 'fraction of real to ghost:',
              (particles.get_number_of_particles()-current_size)*1.0/particles.get_number_of_particles())
Example #7
class VoronoiMesh2D(VoronoiMeshBase):
    """
    2d voronoi mesh class
    """
    def __init__(self, particles):
        super(VoronoiMesh2D, self).__init__(particles)

        face_vars = {
            "area": "double",
            "velocity-x": "double",
            "velocity-y": "double",
            "normal-x": "double",
            "normal-y": "double",
            "com-x": "double",
            "com-y": "double",
            "pair-i": "longlong",
            "pair-j": "longlong",
        }
        self.faces = ParticleContainer(var_dict=face_vars)

    def compute_cell_info(self):
        """
        compute the volume and center of mass of all real particles, and the number
        of faces plus the area, center of mass, normal, and particle pair of each face
        """

        num_faces = mesh.number_of_faces(self.particles,
                                         self.graph["neighbors"],
                                         self.graph["number of neighbors"])
        self.faces.resize(num_faces)

        # the algorithms are cumulative so we have to zero out the data
        self.particles["volume"][:] = 0.0
        self.particles["com-x"][:] = 0.0
        self.particles["com-y"][:] = 0.0

        mesh.cell_face_info_2d(self.particles, self.faces,
                               self.graph["neighbors"],
                               self.graph["number of neighbors"],
                               self.graph["faces"],
                               self.graph["voronoi vertices"])

    def update_boundary_particles(self):
        cumsum = np.cumsum(self.graph["number of neighbors"], dtype=np.int32)
        mesh.flag_boundary_particles(self.particles, self.graph["neighbors"],
                                     self.graph["number of neighbors"], cumsum)

    def update_second_boundary_particles(self):
        cumsum = np.cumsum(self.graph["number of neighbors"], dtype=np.int32)
        mesh.flag_second_boundary_particles(self.particles,
                                            self.graph["neighbors"],
                                            self.graph["number of neighbors"],
                                            cumsum)

    def tessellate(self):
        """
        create 2d voronoi tessellation from particle positions
        """
        pos = np.array(
            [self.particles["position-x"], self.particles["position-y"]],
            dtype=np.float64)

        # create the tessellation
        vor = Voronoi(pos.T)

        # total number of particles
        num_particles = self.particles.get_number_of_particles()

        # create neighbor and face graph
        neighbor_graph = [[] for i in range(num_particles)]
        face_graph = [[] for i in range(num_particles)]

        # loop through each face collecting the two particles
        # that made that face as well as the face itself
        for i, face in enumerate(vor.ridge_points):

            p1, p2 = face
            neighbor_graph[p1].append(p2)
            neighbor_graph[p2].append(p1)

            face_graph[p1] += vor.ridge_vertices[i]
            face_graph[p2] += vor.ridge_vertices[i]

        # sizes for 1d graphs
        neighbor_graph_sizes = np.array([len(n) for n in neighbor_graph],
                                        dtype=np.int32)

        # graphs in 1d
        neighbor_graph = np.array(list(
            itertools.chain.from_iterable(neighbor_graph)),
                                  dtype=np.int32)
        face_graph = np.array(list(itertools.chain.from_iterable(face_graph)),
                              dtype=np.int32)

        self.graph["neighbors"] = neighbor_graph
        self.graph["number of neighbors"] = neighbor_graph_sizes
        self.graph["faces"] = face_graph
        self.graph["voronoi vertices"] = vor.vertices

    def build_geometry(self, gamma):

        pc = self.particles

        self.tessellate()
        self.update_boundary_particles()
        self.update_second_boundary_particles()  #tmp delete
        self.compute_cell_info()

        indices = np.where((pc['tag'] == ParticleTAGS.Real)
                           | (pc['type'] == ParticleTAGS.Boundary)
                           | (pc['type'] == ParticleTAGS.BoundarySecond))[0]

        # extract volume and conserved variables for these cells
        vol = pc['volume'][indices]

        mass = pc['mass'][indices]
        momx = pc['momentum-x'][indices]
        momy = pc['momentum-y'][indices]
        ener = pc['energy'][indices]

        # update primitive variables
        pc['density'][indices] = mass / vol
        pc['velocity-x'][indices] = momx / mass
        pc['velocity-y'][indices] = momy / mass
        pc['pressure'][indices] = (ener / vol - 0.5 * (mass / vol) *
                                   ((momx / mass)**2 +
                                    (momy / mass)**2)) * (gamma - 1.0)
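
For reference, the primitive-variable update at the end of build_geometry is the usual conserved-to-primitive conversion for an ideal gas. A standalone sketch of the same arithmetic (function name chosen for illustration):

import numpy as np

def conserved_to_primitive(mass, momx, momy, energy, volume, gamma):
    """Convert per-cell conserved quantities to primitive variables (ideal gas)."""
    rho = mass / volume                    # density
    vx = momx / mass                       # velocity components
    vy = momy / mass
    # pressure = (total energy density - kinetic energy density) * (gamma - 1)
    pressure = (energy / volume - 0.5 * rho * (vx**2 + vy**2)) * (gamma - 1.0)
    return rho, vx, vy, pressure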
Example #8
    gid = np.arange(90, 90 + num_particles, dtype=np.int32)
    #gid = np.array( [18, 19, 20], dtype=np.int32 )


if rank == 3:
    num_particles = 4*5
    x = np.array( 5*[1.0, 2.0, 3.0, 4.0], dtype=np.float64)
    y = np.array( 5*[4.0, 4.0, 4.0, 4.0], dtype=np.float64)
    z = np.repeat(np.arange(5), 4).astype(np.float64)

    gid = np.arange(105, 105 + num_particles, dtype=np.int32)
    #gid = np.array( [21, 22, 23, 24], dtype=np.int32 )


# create particle data structure
pa = ParticleContainer(num_particles)
pa.register_property(num_particles, 'position-z', 'double')
pa['position-x'][:] = x
pa['position-y'][:] = y
pa['position-z'][:] = z

pa.register_property(x.size, 'gid', 'long')
pa['gid'][:] = gid

# Gather the global data on root
X   = np.zeros(shape=125, dtype=np.float64)
Y   = np.zeros(shape=125, dtype=np.float64)
Z   = np.zeros(shape=125, dtype=np.float64)
GID = np.zeros(shape=125, dtype=np.int32)

# number of particles contributed by each rank (used as the Gatherv receive counts)
displacements = 5*np.array([12, 6, 3, 4], dtype=np.int32)
Example #9
    ys = None
    ids = None
    lengths = None
    disp = None
    send = None

send = comm.scatter(send, root=0)
xlocal = np.empty(send,dtype=np.float64)
ylocal = np.empty(send,dtype=np.float64)
idlocal = np.empty(send,dtype=np.int32)

comm.Scatterv( [xs, (lengths, disp)], xlocal)
comm.Scatterv( [ys, (lengths, disp)], ylocal)
comm.Scatterv( [ids, (lengths, disp)], idlocal)

pc = ParticleContainer(send)
pc['position-x'][:] = xlocal
pc['position-y'][:] = ylocal
pc['process'][:] = rank
pc['type'][:] = ParticleTAGS.Undefined

#pc.register_property(send, "ids", "long")
pc["ids"][:] = idlocal

# perform the load decomposition
order = 21
dom = DomainLimits(dim=2, xmin=0., xmax=1.)
load_b = LoadBalance(pc, dom, comm=comm, order=order)
load_b.decomposition()

mesh = VoronoiMesh2D(pc)
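
Above, the per-rank counts are first distributed with the lowercase (pickle-based) scatter, and the coordinate arrays are then distributed with Scatterv, whose sendbuf tuple is [data, (counts, displacements)]: rank p receives counts[p] elements starting at offset displacements[p]. A minimal self-contained sketch of that pattern (illustrative only):

from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()

# uneven per-rank counts and their starting offsets in the root buffer
counts = np.array([2 * (p + 1) for p in range(size)], dtype=np.int32)
disp = np.zeros(size, dtype=np.int32)
disp[1:] = np.cumsum(counts)[:-1]

sendbuf = None
if rank == 0:
    sendbuf = [np.arange(counts.sum(), dtype=np.float64), (counts, disp)]

local = np.empty(counts[rank], dtype=np.float64)
comm.Scatterv(sendbuf, local, root=0)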
Example #10

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()

# generate random particles in a unit box for each process
np.random.seed(rank)
my_num_particles = np.random.randint(64, 256)
#my_num_particles = np.random.randint(512, 1024)
my_particles = np.random.random(2 * my_num_particles).reshape(
    2, my_num_particles).astype(np.float64)
#my_particles = np.random.normal(0.5, 0.1, 2*my_num_particles).reshape(2, my_num_particles).astype(np.float64)

#pa = ParticleArray(my_num_particles)
pa = ParticleContainer(my_num_particles)
pa['position-x'][:] = my_particles[0, :]
pa['position-y'][:] = my_particles[1, :]
pa['process'][:] = rank
pa['type'][:] = ParticleTAGS.Undefined

#plot initial distribution
#plt.scatter(pc['position-x'], pc['position-y'])
#plt.savefig("plot_init_proc_%d.png" % rank)
#plt.clf()

# perform load balance
#lb = LoadBalance(pc, comm=comm)
#global_tree = lb.decomposition()

# perform the load decomposition