Example #1
    def _ao(self):
        """Application Ordering to relate FiPy matrix rows to PETSc matrix rows
        
        FiPy naturally blocks matrix rows, one set of Equations (or Variables) at a time.
        PETSc requires that all rows pertaining to a particular MPI node be contiguous.
        This PETSc `AO` (Application Ordering) object converts between them.
        
        Only needed for FiPy to PETSc. We can efficiently slice from PETSc to
        FiPy, but PETSc requires us to know the row IDs. 
        """
        if not hasattr(self, "_ao_"):
            comm = self.mesh.communicator

            from mpi4py import MPI

            fipyIDs = self._globalNonOverlappingColIDs
            N = len(fipyIDs)

            # Gather every rank's row count: each rank fills only its own slot,
            # so an element-wise MAX allreduce assembles the complete count array.
            count = numerix.zeros((comm.Nproc, ), dtype=int)
            count[comm.procID] = N
            comm.mpi4py_comm.Allreduce(sendbuf=MPI.IN_PLACE,
                                       recvbuf=count,
                                       op=MPI.MAX)

            # Contiguous PETSc row IDs for this rank start after all lower-rank rows.
            petscIDs = numerix.arange(N) + numerix.sum(count[:comm.procID])

            self._ao_ = PETSc.AO().createBasic(petsc=petscIDs.astype('int32'),
                                               app=fipyIDs.astype('int32'),
                                               comm=comm.petsc4py_comm)
        return self._ao_
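The `AO` built above can be exercised on its own. The following is a minimal sketch, not FiPy code: it assumes only petsc4py and NumPy, invents an interleaved application numbering, and uses `AO.app2petsc` to translate application row IDs into the contiguous per-rank PETSc numbering (run serially or under mpiexec).

# Minimal sketch of AO.createBasic / app2petsc with made-up index arrays.
import numpy as np
from petsc4py import PETSc

comm = PETSc.COMM_WORLD
rank, size = comm.getRank(), comm.getSize()

n = 4  # rows owned by this rank (hypothetical)
# Application ordering: rows interleaved across ranks (rank, rank+size, ...).
app_ids = np.arange(rank, n * size, size, dtype=PETSc.IntType)
# PETSc ordering: each rank's rows are contiguous.
petsc_ids = np.arange(rank * n, (rank + 1) * n, dtype=PETSc.IntType)

ao = PETSc.AO().createBasic(app=app_ids, petsc=petsc_ids, comm=comm)

# app2petsc maps application IDs to PETSc IDs; the passed array is modified in place and returned.
rows = ao.app2petsc(app_ids.copy())
print(rank, app_ids, "->", rows)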
Example #2
File: framework.py Project: afcarl/CMF
    def _setup_6of7_scatters_declare(self):
        """ Defines a scatter for args at this system's level """
        var_sizes = self.var_sizes
        arg_sizes = self.arg_sizes
        iproc = self.comm.rank
        linspace = self._setup_6of7_scatters_linspace
        create = self._setup_6of7_scatters_create

        # Application ordering: for each variable, this rank's slice of that
        # variable's rows in the global variable-blocked numbering.
        app_indices = []
        for ivar in xrange(len(self.variables)):
            start = numpy.sum(var_sizes[:, :ivar]) + \
                numpy.sum(var_sizes[:iproc, ivar])
            end = start + var_sizes[iproc, ivar]
            app_indices.append(linspace(start, end))
        app_indices = numpy.concatenate(app_indices)

        # PETSc ordering: this rank's rows form one contiguous block.
        start = numpy.sum(var_sizes[:iproc, :])
        end = numpy.sum(var_sizes[:iproc + 1, :])
        petsc_indices = linspace(start, end)

        app_ind_set = PETSc.IS().createGeneral(app_indices, comm=self.comm)
        petsc_ind_set = PETSc.IS().createGeneral(petsc_indices, comm=self.comm)
        self.app_ordering = PETSc.AO().createBasic(app_ind_set,
                                                   petsc_ind_set,
                                                   comm=self.comm)

        var_full = []
        arg_full = []
        # Both counters start at this rank's offset into the global argument vector.
        start, end = numpy.sum(arg_sizes[:iproc]), numpy.sum(arg_sizes[:iproc])
        for subsystem in self.subsystems['global']:
            var_partial = []
            arg_partial = []
            for elemsystem in subsystem.subsystems['elem']:
                args = elemsystem.arguments
                for arg in args:
                    if arg not in subsystem.variables and \
                            arg in self.variables:
                        ivar = self.variables.keys().index(arg)
                        var_inds = numpy.sum(var_sizes[:, :ivar]) + args[arg]

                        end += args[arg].shape[0]
                        arg_inds = linspace(start, end)
                        start += args[arg].shape[0]

                        var_partial.append(var_inds)
                        arg_partial.append(arg_inds)
                        var_full.append(var_inds)
                        arg_full.append(arg_inds)
            subsystem.scatter_partial = create(var_partial, arg_partial)

        self.scatter_full = create(var_full, arg_full)
Example #3
    def SelectBlock(self,surface = None):

        if surface is None:
            surface = self.surface
        comm = self.comm
        numTotalBlock = self.M**self.Dim
        numBlockAssigned = numTotalBlock // comm.size + int(comm.rank < (numTotalBlock % comm.size))
        Blockstart = comm.exscan(numBlockAssigned)
        if comm.rank == 0:
            Blockstart = 0
        indBlock = np.arange(Blockstart,Blockstart+numBlockAssigned)
        subBlock = self.BlockInd2SubWithoutBand(indBlock)
        BlockCenterCar = self.BlockSub2CenterCarWithoutBand(subBlock)
        cp,_,_,_ = surface.cp(BlockCenterCar)
        dBlockCenter = self.norm1(cp-BlockCenterCar)
        p = self.interpDegree
        if p % 2 == 1:
            p = ( p + 1 ) / 2
        else:
            p = ( p + 2 ) / 2
        bw = 1.1*((p+2)*self.hGrid+self.hBlock/2)#*np.sqrt(self.Dim)
        (lindBlockWithinBand,) = np.where(dBlockCenter<bw)
        lindBlockWithinBand = lindBlockWithinBand+Blockstart
        lBlockSize = lindBlockWithinBand.size
        numTotalBlockWBand = comm.allreduce(lBlockSize)

        numBlockWBandAssigned = numTotalBlockWBand // comm.size + int(comm.rank < (numTotalBlockWBand % comm.size))

        lindBlockWBandFrom = PETSc.Vec().createWithArray(lindBlockWithinBand,comm=comm)
        self.gindBlockWBand = PETSc.Vec().createMPI((numBlockWBandAssigned,PETSc.DECIDE),comm=comm)

#        gsubBlockWBandFrom = PETSc.Vec().createMPI((self.Dim*lBlockize,PETSc.DECIDE),comm=comm)
#        gsubBlockWBandFrom.setArray(lsubBlockWBand)
#        self.gsubBlockWBand = PETSc.Vec().createMPI((self.Dim*self.numBlockWBandAssigned,PETSc.DECIDE),comm=comm)

        BlockWBandStart = comm.exscan(numBlockWBandAssigned)
        if comm.rank == 0:
            BlockWBandStart = 0
        self.BlockWBandStart = BlockWBandStart
        LInd = PETSc.IS().createStride(numBlockWBandAssigned,\
                                       first=BlockWBandStart,\
                                       step=1,comm=comm)

        self.numTotalBlockWBand = numTotalBlockWBand
        self.numBlockWBandAssigned = numBlockWBandAssigned

        BlockWBandEnd = BlockWBandStart + numBlockWBandAssigned
        self.BlockWBandEnd = BlockWBandEnd

        scatter = PETSc.Scatter().create(lindBlockWBandFrom,LInd,self.gindBlockWBand,None)
        scatter.scatter(lindBlockWBandFrom,self.gindBlockWBand,PETSc.InsertMode.INSERT)
        # Natural-order index to PETSc-order index
        self.ni2pi = PETSc.AO().createMapping(self.gindBlockWBand.getArray().astype(np.int64))
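The `createMapping` call at the end differs from `createBasic` in that the application index set is allowed to have holes, i.e. it need not be a permutation of 0..N-1. A small standalone sketch of that call, with made-up indices and assuming only petsc4py and NumPy:

# Minimal sketch of AO.createMapping with made-up indices (serial on COMM_SELF).
import numpy as np
from petsc4py import PETSc

# Application indices may have gaps, unlike createBasic, which expects a permutation of 0..N-1.
app_ids = np.array([3, 7, 12, 20], dtype=PETSc.IntType)

ao = PETSc.AO().createMapping(app_ids, comm=PETSc.COMM_SELF)

# Translate application indices to the internal PETSc numbering and back again.
petsc_ids = ao.app2petsc(app_ids.copy())
back = ao.petsc2app(petsc_ids.copy())
print(app_ids, "->", petsc_ids, "->", back)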
Example #4
    def SelectBlock(self,surface = None):
        '''
        Starting from the structured blocks of a big cube, find and keep the blocks that are near the surface.
        Before the selection we need to distribute the blocks to processors so that each processor has roughly
        the same number of blocks; after selecting the blocks within the band, some processors might have few
        (or no) blocks while others might have many, so we let PETSc decide how to redistribute the blocks to
        ensure load balance again. Technically, the redistribution is done via the 'scatter' function.
        '''
        if surface is None:
            surface = self.surface
        comm = self.comm
        numTotalBlock = self.M**self.Dim

        # Decide how many blocks should be assigned to each processor.
        # If numTotalBlock = k*comm.size + r, each of the first r processors gets k+1 blocks
        # and each of the remaining processors gets k blocks.
        numBlockAssigned = numTotalBlock // comm.size + int(comm.rank < (numTotalBlock % comm.size))

        # Exclusive scan with the default operation '+':
        # on the i-th processor, sum the 'numBlockAssigned' of processors 0, 1, ..., i-1.
        Blockstart = comm.exscan(numBlockAssigned)
        if comm.rank == 0:
            Blockstart = 0

        # Linear indices of the blocks (consecutive numbers); each processor owns its own part of the global indices.
        indBlock = np.arange(Blockstart,Blockstart+numBlockAssigned)

        # Sub-indices (i,j,k) of the blocks with respect to the virtual shape M*M*M.
        subBlock = self.BlockInd2SubWithoutBand(indBlock)

        # Find the coordinates of the center of each block.
        BlockCenterCar = self.BlockSub2CenterCarWithoutBand(subBlock)

        cp,_,_,_ = surface.closest_point(BlockCenterCar)
        #cp,_,_,_ = surface.cp(BlockCenterCar)
        dBlockCenter = self.norm1(cp-BlockCenterCar)
        p = self.interpDegree
        if p % 2 == 1:
            p = ( p + 1 ) / 2
        else:
            p = ( p + 2 ) / 2
        bw = 1.1*((p+2)*self.hGrid+self.hBlock/2)#*np.sqrt(self.Dim)
        (lindBlockWithinBand,) = np.where(dBlockCenter<bw)

        # The above np.where returns indices starting from 0 (local indices); add Blockstart to make them global.
        lindBlockWithinBand = lindBlockWithinBand+Blockstart

        lBlockSize = lindBlockWithinBand.size
        numTotalBlockWBand = comm.allreduce(lBlockSize)

        numBlockWBandAssigned = numTotalBlockWBand // comm.size + int(comm.rank < (numTotalBlockWBand % comm.size))

        # Create a PETSc vector that wraps the local numpy array (createWithArray makes no copy).
        # 'lindBlockWBandFrom' is the vector FROM which we want to scatter.
        lindBlockWBandFrom = PETSc.Vec().createWithArray(lindBlockWithinBand,comm=comm)

        # Allocate memory for the vector 'self.gindBlockWBand' TO which we want to scatter. 
        self.gindBlockWBand = PETSc.Vec().createMPI((numBlockWBandAssigned,PETSc.DECIDE),comm=comm)


#        gsubBlockWBandFrom = PETSc.Vec().createMPI((self.Dim*lBlockize,PETSc.DECIDE),comm=comm)
#        gsubBlockWBandFrom.setArray(lsubBlockWBand)
#        self.gsubBlockWBand = PETSc.Vec().createMPI((self.Dim*self.numBlockWBandAssigned,PETSc.DECIDE),comm=comm)

        BlockWBandStart = comm.exscan(numBlockWBandAssigned)
        if comm.rank == 0:
            BlockWBandStart = 0
        self.BlockWBandStart = BlockWBandStart
        
        # Index sets of the vector FROM which we want to scatter.  
        LInd = PETSc.IS().createStride(numBlockWBandAssigned,\
                                       first=BlockWBandStart,\
                                       step=1,comm=comm)
        
        # Redistribute (re-scatter) the blocks within the band. The last argument of PETSc.Scatter().create() is the
        # index set of the vector TO which we want to scatter; 'None' means we fill the entire vector 'self.gindBlockWBand'.
        scatter = PETSc.Scatter().create(lindBlockWBandFrom,LInd,self.gindBlockWBand,None)
        scatter.scatter(lindBlockWBandFrom,self.gindBlockWBand,PETSc.InsertMode.INSERT)

        # Natural-order index to PETSc-order index.
        # TODO: colin needs to change this int64 to int32: what is correct thing to do here?
        self.ni2pi = PETSc.AO().createMapping(self.gindBlockWBand.getArray().astype(np.int64))
        #self.ni2pi = PETSc.AO().createMapping(self.gindBlockWBand.getArray().astype(np.int32))

        self.numTotalBlockWBand = numTotalBlockWBand
        self.numBlockWBandAssigned = numBlockWBandAssigned
        self.BlockWBandEnd = BlockWBandStart + numBlockWBandAssigned
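The redistribution step described in the comments can also be reproduced in isolation. The sketch below is not the class code above: it invents unbalanced local data, lets PETSc pick a balanced layout for the destination, and uses the same Scatter pattern (stride IS on the source side, None on the destination side) to rebalance. It assumes only petsc4py and NumPy and is meant to run under mpiexec.

# Minimal sketch of rebalancing a parallel Vec with a Scatter (hypothetical data).
import numpy as np
from petsc4py import PETSc

comm = PETSc.COMM_WORLD
rank = comm.getRank()

# Source vector: deliberately unbalanced local lengths (rank 0 holds more than the rest).
nloc = 8 if rank == 0 else 2
vfrom = PETSc.Vec().createWithArray(np.full(nloc, rank, dtype=PETSc.ScalarType), comm=comm)

# Destination vector: same global length, but PETSc decides a balanced layout.
vto = PETSc.Vec().createMPI(vfrom.getSize(), comm=comm)

# Global indices of the source entries that should land in this rank's slice of the destination.
start, end = vto.getOwnershipRange()
ifrom = PETSc.IS().createStride(end - start, first=start, step=1, comm=comm)

# None on the destination side means the whole destination vector is filled in order.
scatter = PETSc.Scatter().create(vfrom, ifrom, vto, None)
scatter.scatter(vfrom, vto, PETSc.InsertMode.INSERT)
print(rank, vto.getArray())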
Example #5
File: system.py Project: naylor-b/CMF
    def _initializePETScScatters(self):
        """ First, defines the PETSc Vec application ordering objects """
        getLinspace = lambda m1, m2: numpy.array(
            numpy.linspace(m1, m2 - 1, m2 - m1), 'i')
        varSizes = self.varSizes

        appIndices = []
        i = self.rank
        for j in xrange(len(self.variables)):
            m1 = numpy.sum(varSizes[:, :j]) + numpy.sum(varSizes[:i, j])
            m2 = m1 + varSizes[i, j]
            appIndices.append(getLinspace(m1, m2))
        appIndices = numpy.concatenate(appIndices)

        m1 = numpy.sum(varSizes[:self.rank, :])
        m2 = m1 + numpy.sum(varSizes[self.rank, :])
        petscIndices = getLinspace(m1, m2)

        ISapp = PETSc.IS().createGeneral(appIndices, comm=self.comm)
        ISpetsc = PETSc.IS().createGeneral(petscIndices, comm=self.comm)
        self.AOvarPETSc = PETSc.AO().createBasic(ISapp,
                                                 ISpetsc,
                                                 comm=self.comm)
        """ Next, the scatters are defined """
        def createScatter(self, varInds, argInds):
            merge = lambda x: numpy.concatenate(x) if len(x) > 0 else []
            ISvar = PETSc.IS().createGeneral(merge(varInds), comm=self.comm)
            ISarg = PETSc.IS().createGeneral(merge(argInds), comm=self.comm)
            ISvar = self.AOvarPETSc.app2petsc(ISvar)
            if ISvar.array.shape[0] == 0:
                return None
            else:
                return PETSc.Scatter().create(self.vVarPETSc, ISvar,
                                              self.vArgPETSc, ISarg)

        variableIndex = self.variables.keys().index
        varIndsFull = []
        argIndsFull = []
        i1, i2 = 0, 0
        for system in self.subsystems:
            varIndsFwd = []
            argIndsFwd = []
            varIndsRev = []
            argIndsRev = []
            for (n1, c1) in system.variables:
                args = system.variables[n1, c1]['args']
                for (n2, c2) in args:
                    if (n2, c2) not in system.variables \
                            and (n2, c2) in self.variables:
                        j = variableIndex((n1, c1))
                        i2 += args[n2, c2].shape[0]
                        varInds = numpy.sum(varSizes[:, :j]) + args[n2, c2]
                        argInds = getLinspace(i1, i2)
                        i1 += args[n2, c2].shape[0]
                        if variableIndex((n1, c1)) > variableIndex((n2, c2)):
                            varIndsFwd.append(varInds)
                            argIndsFwd.append(argInds)
                        else:
                            varIndsRev.append(varInds)
                            argIndsRev.append(argInds)
                        varIndsFull.append(varInds)
                        argIndsFull.append(argInds)
            system.scatterFwd = createScatter(self, varIndsFwd, argIndsFwd)
            system.scatterRev = createScatter(self, varIndsRev, argIndsRev)
        self.scatterFull = createScatter(self, varIndsFull, argIndsFull)

        for system in self.localSubsystems:
            system._initializePETScScatters()
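Here the AO is built from IS objects rather than raw index arrays, and app2petsc is applied to an IS before the scatter is created. A brief sketch of that flavour, with made-up indices and assuming only petsc4py and NumPy:

# Minimal sketch of AO.createBasic from index sets and app2petsc on an IS (serial).
import numpy as np
from petsc4py import PETSc

comm = PETSc.COMM_SELF
app = PETSc.IS().createGeneral(np.array([2, 0, 3, 1], dtype=PETSc.IntType), comm=comm)
pet = PETSc.IS().createGeneral(np.array([0, 1, 2, 3], dtype=PETSc.IntType), comm=comm)
ao = PETSc.AO().createBasic(app, pet, comm=comm)

# Remap an index set from application numbering to PETSc numbering (modified in place).
isvar = PETSc.IS().createGeneral(np.array([0, 2], dtype=PETSc.IntType), comm=comm)
ao.app2petsc(isvar)
print(isvar.getIndices())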