Example #1
def setupPETScmatrix(Vr, Vc, mattype, mpicomm):
    """ 
    Set up a PETSc matrix partitioned consistently with Fenics mesh
    Vr, Vc = function spaces for the rows and columns
    """
    
    # extract local to global map for each fct space
    VrDM, VcDM = Vr.dofmap(), Vc.dofmap()
    r_map = PETSc.LGMap().create(VrDM.dofs(), comm=mpicomm)
    c_map = PETSc.LGMap().create(VcDM.dofs(), comm=mpicomm)
    # set up matrix
    petscmatrix = PETSc.Mat()
    petscmatrix.create(mpicomm)
    try:
        localdimVr = VrDM.local_dimension("owned")
        localdimVc = VcDM.local_dimension("owned")
    except AttributeError:  # newer dolfin: use index_map() instead of local_dimension()
        VrDMim = VrDM.index_map()
        localdimVr = VrDMim.size(VrDMim.MapSize_OWNED)
        VcDMim = VcDM.index_map()
        localdimVc = VcDMim.size(VcDMim.MapSize_OWNED)
    petscmatrix.setSizes([[localdimVr, Vr.dim()],
                          [localdimVc, Vc.dim()]])
    petscmatrix.setType(mattype) # 'aij', 'dense'
    petscmatrix.setUp()
    petscmatrix.setLGMap(r_map, c_map)
    # compare PETSc and Fenics local partitions:
    Istart, Iend = petscmatrix.getOwnershipRange()
    assert list(VrDM.dofs()) == list(range(Istart, Iend))
    return petscmatrix, VrDM, VcDM
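
A minimal driver for the helper above; a sketch under assumptions (legacy FEniCS with petsc4py available, dolfin imported as dl, and the illustrative spaces Vr/Vc):

import dolfin as dl
from petsc4py import PETSc

mesh = dl.UnitSquareMesh(16, 16)
Vr = dl.FunctionSpace(mesh, 'Lagrange', 1)
Vc = dl.FunctionSpace(mesh, 'Lagrange', 1)
A, VrDM, VcDM = setupPETScmatrix(Vr, Vc, 'aij', PETSc.COMM_WORLD)
A.setValue(0, 0, 1.0)   # insert with global indices...
A.assemble()            # ...then assemble before use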
Example #2
from contextlib import contextmanager


@contextmanager
def petsc_serial_matrix(test_space, trial_space, nnz=None):
    '''
    PETSc.Mat from trial_space to test_space to be filled inside the
    with-block. The spaces can be represented by integers meaning
    generic R^n.
    '''
    # Decide local to global map
    # For our custom case everything is serial
    if is_number(test_space) and is_number(trial_space):
        comm = mpi_comm_world().tompi4py()
        # Local same as global
        sizes = [[test_space, test_space], [trial_space, trial_space]]

        row_map = PETSc.IS().createStride(test_space, 0, 1, comm)
        col_map = PETSc.IS().createStride(trial_space, 0, 1, comm)
    # With function space this can be extracted
    else:
        mesh = test_space.mesh()
        comm = mesh.mpi_comm().tompi4py()

        row_map = test_space.dofmap()
        col_map = trial_space.dofmap()

        sizes = [[
            row_map.index_map().size(IndexMap.MapSize_OWNED),
            row_map.index_map().size(IndexMap.MapSize_GLOBAL)
        ],
                 [
                     col_map.index_map().size(IndexMap.MapSize_OWNED),
                     col_map.index_map().size(IndexMap.MapSize_GLOBAL)
                 ]]

        row_map = list(map(int, row_map.tabulate_local_to_global_dofs()))
        col_map = list(map(int, col_map.tabulate_local_to_global_dofs()))

    assert comm.size == 1

    lgmap = lambda indices: (PETSc.LGMap().create(indices, comm=comm)
                             if isinstance(indices, list) else PETSc.LGMap().
                             createIS(indices))

    row_lgmap, col_lgmap = map(lgmap, (row_map, col_map))

    # Alloc
    mat = PETSc.Mat().createAIJ(sizes, nnz=nnz, comm=comm)
    mat.setUp()

    mat.setLGMap(row_lgmap, col_lgmap)

    mat.assemblyBegin()
    # Fill
    yield mat
    # Tear down
    mat.assemblyEnd()
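
Because the function yields mid-assembly it only makes sense as a context manager (hence the @contextmanager decorator added above). A hedged usage sketch for the integer branch, assuming a legacy FEniCS build that provides mpi_comm_world:

# Fill a 3x3 serial matrix inside the with-block; assemblyEnd() runs on exit.
with petsc_serial_matrix(3, 3, nnz=1) as mat:
    for i in range(3):
        mat.setValueLocal(i, i, 1.0)
print(mat.getDiagonal().getArray())   # [1. 1. 1.]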
Example #3
def petsc_identity(N, dofs=None):
    """
    Create an identity matrix using petsc4py. Note: this currently
    only works in one process.
    """

    import dolfin as dlf
    from petsc4py import PETSc

    v = PETSc.Vec()
    v.create()
    v.setSizes(N)
    v.setType('standard')
    v.setValues(range(N), [1.0] * N)

    A = PETSc.Mat()
    A.createAIJ([N, N], nnz=N)
    if dofs is not None:
        lgmap = PETSc.LGMap().create(dofs)
        A.setLGMap(lgmap, lgmap)
    A.setDiagonal(v)
    A.assemble()

    return dlf.PETScMatrix(A)
Example #4
def identity_matrix(V, d=1.):
    '''Diagonal matrix'''
    # Avoiding assembly if this is Real space.
    diag = Function(V).vector()
    global_size = diag.size()
    local_size = diag.local_size()
    comm = V.mesh().mpi_comm().tompi4py()

    mat = PETSc.Mat().createAIJ(size=[[local_size, global_size],
                                      [local_size, global_size]],
                                nnz=1,
                                comm=comm)
    diag = as_backend_type(diag).vec()
    diag.set(d)
    mat.setDiagonal(diag)

    lgmap = PETSc.LGMap().create(
        list(map(int, V.dofmap().tabulate_local_to_global_dofs())),
        comm=comm)
    mat.setLGMap(lgmap, lgmap)

    mat.assemblyBegin()
    mat.assemblyEnd()

    return PETScMatrix(mat)
Example #5
 def lgmap(self):
     """A PETSc LGMap mapping process-local indices to global
     indices for this :class:`DataSet`.
     """
     lgmap = PETSc.LGMap()
     lgmap.create(indices=np.arange(1, dtype=IntType),
                  bsize=self.cdim, comm=self.comm)
     return lgmap
Example #6
 def unblocked_lgmap(self):
     """A PETSc LGMap mapping process-local indices to global
     indices for this :class:`DataSet` with a block size of 1.
     """
     indices = self.lgmap.indices
     lgmap = PETSc.LGMap().create(indices=indices,
                                  bsize=1, comm=self.lgmap.comm)
     return lgmap
Example #7
def masked_lgmap(lgmap, mask, block=True):
    if block:
        indices = lgmap.block_indices.copy()
        bsize = lgmap.getBlockSize()
    else:
        indices = lgmap.indices.copy()
        bsize = 1
    indices[mask] = -1
    return PETSc.LGMap().create(indices=indices, bsize=bsize, comm=lgmap.comm)
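
For context, a standalone serial petsc4py sketch (my own illustration, not from the source above) of what the -1 entries do: local indices mapped to -1 are silently dropped when values are set through the local numbering, which is how Dirichlet rows get masked out.

import numpy as np
from petsc4py import PETSc

lgmap = PETSc.LGMap().create([0, 1, -1, 3], comm=PETSc.COMM_WORLD)  # local 2 masked
A = PETSc.Mat().createAIJ([4, 4], nnz=4, comm=PETSc.COMM_WORLD)
A.setLGMap(lgmap, lgmap)
A.setValueLocal(1, 1, 1.0)   # lands at global (1, 1)
A.setValueLocal(2, 2, 1.0)   # dropped: maps to global (-1, -1)
A.assemble()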
Example #8
 def __init__(self,
              ghosted_csr_mat,
              par_bs,
              par_n,
              par_N,
              par_nghost,
              subdomain2global,
              blockVecType="simple",
              pde=None):
     self.pde = pde
     p4pyPETSc.Mat.__init__(self)
     self.ghosted_csr_mat = ghosted_csr_mat
     self.blockVecType = blockVecType
     assert self.blockVecType == "simple", "petsc4py wrappers require self.blockVecType=simple"
     self.create(p4pyPETSc.COMM_WORLD)
     blockSize = max(1, par_bs)
     if blockSize > 1 and blockVecType != "simple":
         ## \todo fix block aij in ParMat_petsc4py
         self.setType('baij')
         self.setSizes([[blockSize * par_n, blockSize * par_N],
                        [blockSize * par_n, blockSize * par_N]],
                       bsize=blockSize)
         self.setBlockSize(blockSize)
         self.subdomain2global = subdomain2global  #no need to include extra block dofs?
     else:
         self.setType('aij')
         self.setSizes([[par_n * blockSize, par_N * blockSize],
                        [par_n * blockSize, par_N * blockSize]],
                       bsize=1)
         if blockSize > 1:  #have to build in block dofs
             subdomain2globalTotal = numpy.zeros(
                 (blockSize * subdomain2global.shape[0], ), 'i')
             for j in range(blockSize):
                 subdomain2globalTotal[
                     j::blockSize] = subdomain2global * blockSize + j
             self.subdomain2global = subdomain2globalTotal
         else:
             self.subdomain2global = subdomain2global
     from proteus import Comm
     comm = Comm.get()
     logEvent(
         "ParMat_petsc4py comm.rank= %s blockSize = %s par_n= %s par_N=%s par_nghost=%s par_jacobian.getSizes()= %s "
         % (comm.rank(), blockSize, par_n, par_N, par_nghost,
            self.getSizes()))
     self.csr_rep = ghosted_csr_mat.getCSRrepresentation()
     blockOwned = blockSize * par_n
     self.csr_rep_owned = ghosted_csr_mat.getSubMatCSRrepresentation(
         0, blockOwned)
     self.petsc_l2g = p4pyPETSc.LGMap()
     self.petsc_l2g.create(self.subdomain2global)
     self.colind_global = self.petsc_l2g.apply(
         self.csr_rep_owned[1])  #prealloc needs global indices
     self.setPreallocationCSR(
         [self.csr_rep_owned[0], self.colind_global, self.csr_rep_owned[2]])
     self.setUp()
     self.setLGMap(self.petsc_l2g)
     self.setFromOptions()
Example #9
@contextmanager
def petsc_serial_matrix(test_space, trial_space, nnz=None):
    '''
    PETSc.Mat from trial_space to test_space to be filled inside the
    with-block. The spaces can be represented by integers meaning
    generic R^n.
    '''
    mesh = test_space.mesh()
    comm = mesh.mpi_comm()
    assert comm.size == 1

    row_map = test_space.dofmap()
    col_map = trial_space.dofmap()

    sizes = [[
        row_map.index_map().size(df.IndexMap.MapSize.OWNED),
        row_map.index_map().size(df.IndexMap.MapSize.GLOBAL)
    ],
             [
                 col_map.index_map().size(df.IndexMap.MapSize.OWNED),
                 col_map.index_map().size(df.IndexMap.MapSize.GLOBAL)
             ]]

    row_map = list(map(int, row_map.tabulate_local_to_global_dofs()))
    col_map = list(map(int, col_map.tabulate_local_to_global_dofs()))

    lgmap = lambda indices: (PETSc.LGMap().create(indices, comm=comm)
                             if isinstance(indices, list) else PETSc.LGMap().
                             createIS(indices))

    row_lgmap, col_lgmap = list(map(lgmap, (row_map, col_map)))

    # Alloc
    mat = PETSc.Mat().createAIJ(sizes, nnz=nnz, comm=comm)
    mat.setUp()

    mat.setLGMap(row_lgmap, col_lgmap)

    mat.assemblyBegin()
    # Fill
    yield mat
    # Tear down
    mat.assemblyEnd()
Example #10
 def lgmap(self):
     """A PETSc LGMap mapping process-local indices to global
     indices for this :class:`DataSet`.
     """
     lgmap = PETSc.LGMap()
     if self.comm.size == 1:
         lgmap.create(indices=np.arange(self.size, dtype=IntType),
                      bsize=self.cdim, comm=self.comm)
     else:
         lgmap.create(indices=self.halo.local_to_global_numbering,
                      bsize=self.cdim, comm=self.comm)
     return lgmap
Example #11
 def __init__(self, ghosted_csr_mat=None, par_bs=None, par_n=None, par_N=None,
              par_nghost=None, subdomain2global=None, blockVecType="simple",
              pde=None, par_nc=None, par_Nc=None, proteus_jacobian=None,
              nzval_proteus2petsc=None):
     p4pyPETSc.Mat.__init__(self)
     if ghosted_csr_mat is None:
         return  # when duplicating for petsc usage
     self.pde = pde
     if par_nc is None:
         par_nc = par_n
     if par_Nc is None:
         par_Nc = par_N
     self.proteus_jacobian = proteus_jacobian
     self.nzval_proteus2petsc = nzval_proteus2petsc
     self.ghosted_csr_mat = ghosted_csr_mat
     self.blockVecType = blockVecType
     assert self.blockVecType == "simple", "petsc4py wrappers require self.blockVecType=simple"
     self.create(p4pyPETSc.COMM_WORLD)
     self.blockSize = max(1, par_bs)
     if self.blockSize > 1 and blockVecType != "simple":
         ## \todo fix block aij in ParMat_petsc4py
         self.setType('baij')
         self.setSizes([[self.blockSize * par_n, self.blockSize * par_N],
                        [self.blockSize * par_nc, self.blockSize * par_Nc]],
                       bsize=self.blockSize)
         self.setBlockSize(self.blockSize)
         self.subdomain2global = subdomain2global  # no need to include extra block dofs?
     else:
         self.setType('aij')
         self.setSizes([[par_n * self.blockSize, par_N * self.blockSize],
                        [par_nc * self.blockSize, par_Nc * self.blockSize]],
                       bsize=1)
         if self.blockSize > 1:  # have to build in block dofs
             subdomain2globalTotal = numpy.zeros(
                 (self.blockSize * subdomain2global.shape[0], ), 'i')
             for j in range(self.blockSize):
                 subdomain2globalTotal[
                     j::self.blockSize] = subdomain2global * self.blockSize + j
             self.subdomain2global = subdomain2globalTotal
         else:
             self.subdomain2global = subdomain2global
     from proteus import Comm
     comm = Comm.get()
     logEvent(
         "ParMat_petsc4py comm.rank= %s blockSize = %s par_n= %s par_N=%s par_nghost=%s par_jacobian.getSizes()= %s "
         % (comm.rank(), self.blockSize, par_n, par_N, par_nghost,
            self.getSizes()))
     self.csr_rep = ghosted_csr_mat.getCSRrepresentation()
     if self.proteus_jacobian is not None:
         self.proteus_csr_rep = self.proteus_jacobian.getCSRrepresentation()
     if self.blockSize > 1:
         blockOwned = self.blockSize * par_n
         self.csr_rep_local = ghosted_csr_mat.getSubMatCSRrepresentation(0, blockOwned)
     else:
         self.csr_rep_local = ghosted_csr_mat.getSubMatCSRrepresentation(0, par_n)
     self.petsc_l2g = p4pyPETSc.LGMap()
     self.petsc_l2g.create(self.subdomain2global)
     self.setUp()
     self.setLGMap(self.petsc_l2g)
     self.colind_global = self.petsc_l2g.apply(self.csr_rep_local[1])  # prealloc needs global indices
     self.setPreallocationCSR([self.csr_rep_local[0], self.colind_global, self.csr_rep_local[2]])
     self.setFromOptions()
Example #12
def assemble_mtx_to_petsc(pmtx,
                          mtx,
                          pdofs,
                          drange,
                          is_overlap=True,
                          comm=None,
                          verbose=False):
    """
    Assemble a local CSR matrix to a global PETSc matrix.

    WIP
    ---
    Try Mat.setValuesCSR() - no lgmap - filtering vectorized?
    """
    if comm is None:
        comm = PETSc.COMM_WORLD

    lgmap = PETSc.LGMap().create(pdofs, comm=comm)

    if is_overlap:
        pmtx.setLGMap(lgmap, lgmap)

        data, prows, cols = mtx.data, mtx.indptr, mtx.indices

        output('setting matrix values...', verbose=verbose)
        tt = time.perf_counter()  # time.clock() was removed in Python 3.8
        for ir, rdof in enumerate(pdofs):
            if (rdof < drange[0]) or (rdof >= drange[1]): continue

            for ic in range(prows[ir], prows[ir + 1]):
                # output(ir, rdof, cols[ic])
                pmtx.setValueLocal(ir, cols[ic], data[ic],
                                   PETSc.InsertMode.INSERT_VALUES)
        output('...done in', time.perf_counter() - tt, verbose=verbose)

        output('assembling matrix...', verbose=verbose)
        tt = time.perf_counter()
        pmtx.assemble()
        output('...done in', time.perf_counter() - tt, verbose=verbose)

    else:
        pmtx.setLGMap(lgmap, lgmap)
        output('setting matrix values...', verbose=verbose)
        tt = time.perf_counter()
        pmtx.setValuesLocalCSR(mtx.indptr, mtx.indices, mtx.data,
                               PETSc.InsertMode.ADD_VALUES)
        output('...done in', time.perf_counter() - tt, verbose=verbose)

        output('assembling matrix...', verbose=verbose)
        tt = time.perf_counter()
        pmtx.assemble()
        output('...done in', time.perf_counter() - tt, verbose=verbose)
Example #13
def set_lg_map(mat):
    '''Set local-to-global-map on the matrix'''
    # NOTE: serial only - so we own everything, but sometimes we still need
    # to tell that to petsc (especially when bcs are to be applied)

    if is_number(mat): return mat

    assert is_petsc_mat(mat) or isinstance(mat, block_mat), (type(mat))

    if isinstance(mat, block_mat):
        blocks = np.array(list(map(set_lg_map, mat.blocks.flatten()))).reshape(mat.blocks.shape)
        return block_mat(blocks)

    comm = mpi_comm_world().tompi4py()
    # Work with matrix
    rowmap, colmap = list(range(mat.size(0))), list(range(mat.size(1)))

    row_lgmap = PETSc.LGMap().create(rowmap, comm=comm)
    col_lgmap = PETSc.LGMap().create(colmap, comm=comm)

    as_petsc(mat).setLGMap(row_lgmap, col_lgmap)

    return mat
Example #14
 def setUp(self):
     comm = PETSc.COMM_WORLD
     comm_size = comm.getSize()
     comm_rank = comm.getRank()
     lsize = 10
     first = lsize * comm_rank
     last = first + lsize
     if comm_rank > 0:
         first -= 1
     if comm_rank < (comm_size - 1):
         last += 1
     self.idx = list(range(first, last))
     bs = self.BS
     self.lgmap = PETSc.LGMap().create(self.idx, bs, comm=PETSc.COMM_WORLD)
Example #15
    def __init__(self, comm, N):
        self.comm = comm
        self.N = N  # global problem size
        self.h = 1 / N  # grid spacing on unit interval
        self.n = N // comm.size + int(
            comm.rank < (N % comm.size))  # owned part of global problem
        self.start = comm.exscan(self.n)
        if comm.rank == 0: self.start = 0
        gindices = numpy.arange(self.start - 1,
                                self.start + self.n + 1,
                                dtype=PETSc.IntType) % N  # periodic
        self.mat = PETSc.Mat().create(comm=comm)
        size = (self.n, self.N)  # local and global sizes
        self.mat.setSizes((size, size))
        self.mat.setFromOptions()
        self.mat.setPreallocationNNZ(
            (3, 1)
        )  # Conservative preallocation for 3 "local" columns and one non-local

        # Allow matrix insertion using local indices [0:n+2]
        lgmap = PETSc.LGMap().create(list(gindices), comm=comm)
        self.mat.setLGMap(lgmap, lgmap)

        # Global and local vectors
        self.gvec = self.mat.createVecRight()
        self.lvec = PETSc.Vec().create(comm=PETSc.COMM_SELF)
        self.lvec.setSizes(self.n + 2)
        self.lvec.setUp()
        # Configure scatter from global to local
        isg = PETSc.IS().createGeneral(list(gindices), comm=comm)
        self.g2l = PETSc.Scatter().create(self.gvec, isg, self.lvec, None)

        self.tozero, self.zvec = PETSc.Scatter.toZero(self.gvec)
        self.history = []

        if False:  # Print some diagnostics
            print('[%d] local size %d, global size %d, starting offset %d' %
                  (comm.rank, self.n, self.N, self.start))
            self.gvec.setArray(numpy.arange(self.start, self.start + self.n))
            self.gvec.view()
            self.g2l.scatter(self.gvec, self.lvec, PETSc.InsertMode.INSERT)
            for rank in range(comm.size):
                if rank == comm.rank:
                    print('Contents of local Vec on rank %d' % rank)
                    self.lvec.view()
                comm.barrier()
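
The LGMap above exists so the three-point stencil can be inserted with purely local indices in [0:n+2]; a hedged sketch (an assumption, not part of the original class) of the matching assembly loop:

# Owned global rows correspond to local indices 1..n (local 0 and n+1 are ghosts).
def assemble_laplacian(self):
    for i in range(1, self.n + 1):
        self.mat.setValuesLocal([i], [i - 1, i, i + 1],
                                [-1.0, 2.0, -1.0])   # 1-D periodic stencil (unscaled)
    self.mat.assemble()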
Example #16
def assemble_mtx_to_petsc(pmtx,
                          mtx,
                          pdofs,
                          drange,
                          is_overlap=True,
                          comm=None,
                          verbose=False):
    """
    Assemble a local CSR matrix to a global PETSc matrix.
    """
    if comm is None:
        comm = PETSc.COMM_WORLD

    timer = Timer()

    lgmap = PETSc.LGMap().create(pdofs, comm=comm)
    pmtx.setLGMap(lgmap, lgmap)
    if is_overlap:
        output('setting matrix values...', verbose=verbose)
        timer.start()
        mask = (pdofs < drange[0]) | (pdofs >= drange[1])
        nnz_per_row = nm.diff(mtx.indptr)
        mtx2 = mtx.copy()
        mtx2.data[nm.repeat(mask, nnz_per_row)] = 0
        mtx2.eliminate_zeros()
        pmtx.setValuesLocalCSR(mtx2.indptr, mtx2.indices, mtx2.data,
                               PETSc.InsertMode.INSERT_VALUES)
        output('...done in', timer.stop(), verbose=verbose)

        output('assembling matrix...', verbose=verbose)
        timer.start()
        pmtx.assemble()
        output('...done in', timer.stop(), verbose=verbose)

    else:
        output('setting matrix values...', verbose=verbose)
        timer.start()
        pmtx.setValuesLocalCSR(mtx.indptr, mtx.indices, mtx.data,
                               PETSc.InsertMode.ADD_VALUES)
        output('...done in', timer.stop(), verbose=verbose)

        output('assembling matrix...', verbose=verbose)
        timer.start()
        pmtx.assemble()
        output('...done in', timer.stop(), verbose=verbose)
Example #17
def assemble_rhs_to_petsc(prhs,
                          rhs,
                          pdofs,
                          drange,
                          is_overlap=True,
                          comm=None,
                          verbose=False):
    """
    Assemble a local right-hand side vector to a global PETSc vector.
    """
    if comm is None:
        comm = PETSc.COMM_WORLD

    lgmap = PETSc.LGMap().create(pdofs, comm=comm)

    if is_overlap:
        prhs.setLGMap(lgmap)
        output('setting rhs values...', verbose=verbose)
        tt = time.perf_counter()  # time.clock() was removed in Python 3.8
        for ir, rdof in enumerate(pdofs):
            if (rdof < drange[0]) or (rdof >= drange[1]): continue
            prhs.setValueLocal(ir, rhs[ir], PETSc.InsertMode.INSERT_VALUES)
        output('...done in', time.perf_counter() - tt, verbose=verbose)

        output('assembling rhs...', verbose=verbose)
        tt = time.perf_counter()
        prhs.assemble()
        output('...done in', time.perf_counter() - tt, verbose=verbose)

    else:
        prhs.setLGMap(lgmap)
        output('setting rhs values...', verbose=verbose)
        tt = time.perf_counter()
        prhs.setValuesLocal(nm.arange(len(rhs), dtype=nm.int32), rhs,
                            PETSc.InsertMode.ADD_VALUES)
        output('...done in', time.perf_counter() - tt, verbose=verbose)

        output('assembling rhs...', verbose=verbose)
        tt = time.perf_counter()
        prhs.assemble()
        output('...done in', time.perf_counter() - tt, verbose=verbose)
Example #18
def CreatePETScMatrix(ngs_mat):
    pardofs = ngs_mat.row_pardofs
    # comm = MPI.COMM_WORLD
    comm = pardofs.comm.mpi4py
    globnums, nglob = pardofs.EnumerateGlobally()
    iset = psc.IS().createGeneral(indices=globnums, comm=comm)
    lgmap = psc.LGMap().createIS(iset)

    locmat = ngs_mat.local_mat
    val, col, ind = locmat.CSR()
    ind = np.array(ind, dtype='int32')
    apsc_loc = psc.Mat().createAIJ(size=(locmat.height, locmat.width),
                                   csr=(ind, col, val),
                                   comm=MPI.COMM_SELF)

    mat = psc.Mat().createPython(size=nglob, comm=comm)
    mat.setType(psc.Mat.Type.IS)
    mat.setLGMap(lgmap)
    mat.setISLocalMat(apsc_loc)
    mat.assemble()
    mat.convert("mpiaij")
    return mat
Example #19
def CreatePETScMatrix(ngs_mat, freedofs=None):
    pardofs = ngs_mat.row_pardofs
    comm = pardofs.comm.mpi4py

    locmat = ngs_mat.local_mat
    eh, ew = locmat.entrysizes
    if eh != ew: raise Exception("only square entries are allowed")

    val, col, ind = locmat.CSR()
    ind = np.array(ind).astype(psc.IntType)
    col = np.array(col).astype(psc.IntType)
    apsc_loc = psc.Mat().createBAIJ(size=(eh * locmat.height,
                                          eh * locmat.width),
                                    bsize=eh,
                                    csr=(ind, col, val),
                                    comm=MPI.COMM_SELF)

    if freedofs is not None:
        locfree = np.flatnonzero(freedofs).astype(psc.IntType)
        isfree_loc = psc.IS().createBlock(indices=locfree, bsize=eh)
        apsc_loc = apsc_loc.createSubMatrices(isfree_loc)[0]

    globnums, nglob = pardofs.EnumerateGlobally(freedofs)
    if freedofs is not None:
        globnums = np.array(globnums, dtype=psc.IntType)[freedofs]

    lgmap = psc.LGMap().create(indices=globnums, bsize=eh, comm=comm)

    mat = psc.Mat().create(comm=comm)
    mat.setSizes(size=nglob * eh, bsize=eh)
    mat.setType(psc.Mat.Type.IS)
    mat.setLGMap(lgmap)
    mat.setISLocalMat(apsc_loc)
    mat.assemble()
    mat.convert("mpiaij")
    return mat
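
For reference, a small standalone serial sketch (my own illustration) of how a blocked LGMap like the one above (bsize=eh) expands block indices into dof indices:

from petsc4py import PETSc

# Block indices [0, 2] with bsize=2 address the dofs [0, 1, 4, 5].
lgmap = PETSc.LGMap().create([0, 2], bsize=2, comm=PETSc.COMM_SELF)
print(lgmap.block_indices)   # [0 2]
print(lgmap.indices)         # [0 1 4 5]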
Example #20
    def __init__(self,
                 array=None,
                 bs=None,
                 n=None,
                 N=None,
                 nghosts=None,
                 subdomain2global=None,
                 blockVecType="simple"):
        if array is None:
            return  # cek hack, don't know why init gets called by PETSc.Vec duplicate function
        p4pyPETSc.Vec.__init__(self)
        blockSize = max(1, bs)
        self.dim_proc = n * blockSize
        self.nghosts = nghosts
        self.blockVecType = blockVecType
        assert self.blockVecType == "simple", "petsc4py wrappers require self.blockVecType=simple"
        self.proteus_array = array
        if nghosts is None:
            if blockVecType == "simple":
                self.createWithArray(array,
                                     size=(blockSize * n, blockSize * N),
                                     bsize=1)
            else:
                self.createWithArray(array,
                                     size=(blockSize * n, blockSize * N),
                                     bsize=blockSize)
            self.subdomain2global = subdomain2global
            self.petsc_l2g = None
        else:
            assert nghosts >= 0, "The number of ghostnodes must be non-negative"
            assert subdomain2global.shape[0] == (n + nghosts), (
                "The subdomain2global map is the wrong length n=%i,nghosts=%i,shape=%i \n"
                % (n, n + nghosts, subdomain2global.shape[0]))
            assert len(array.flat) == (n + nghosts) * blockSize
            if blockVecType == "simple":
                ghosts = numpy.zeros((blockSize * nghosts), 'i')
                for j in range(blockSize):
                    ghosts[j::blockSize] = subdomain2global[n:] * blockSize + j
                self.createGhostWithArray(ghosts,
                                          array,
                                          size=(blockSize * n, blockSize * N),
                                          bsize=1)
                if blockSize > 1:  #have to build in block dofs
                    subdomain2globalTotal = numpy.zeros(
                        (blockSize * subdomain2global.shape[0], ), 'i')
                    for j in range(blockSize):
                        subdomain2globalTotal[
                            j::blockSize] = subdomain2global * blockSize + j
                    self.subdomain2global = subdomain2globalTotal
                else:
                    self.subdomain2global = subdomain2global
                self.petsc_l2g = p4pyPETSc.LGMap()
                self.petsc_l2g.create(self.subdomain2global)
                self.setLGMap(self.petsc_l2g)

            else:
                #TODO need to debug
                ghosts = subdomain2global[n:]
                self.createGhostWithArray(ghosts,
                                          array,
                                          size=(blockSize * n, blockSize * N),
                                          bsize=blockSize)
                self.subdomain2global = subdomain2global
                self.petsc_l2g = p4pyPETSc.LGMap()
                self.petsc_l2g.create(self.subdomain2global)
                self.setLGMap(self.petsc_l2g)
        self.setFromOptions()
Example #21
from petsc4py import PETSc

import dolfin as dl
from dolfin import MPI, mpi_comm_world
#mycomm = mpi_comm_world()
mycomm = PETSc.COMM_WORLD
mpisize = MPI.size(mycomm)
mpirank = MPI.rank(mycomm)

mesh = dl.UnitSquareMesh(100, 100)
Vr = dl.FunctionSpace(mesh, 'Lagrange', 1)
Vc = dl.FunctionSpace(mesh, 'Lagrange', 1)
#
Vrdofmap, Vcdofmap = Vr.dofmap(), Vc.dofmap()
#print 'rank={}, Vr dofmap:'.format(mpirank), Vr.dofmap().dofs()
#print 'rank={}, Vc dofmap:'.format(mpirank), Vc.dofmap().dofs()
rmap = PETSc.LGMap().create(Vrdofmap.dofs(), comm=mycomm)
cmap = PETSc.LGMap().create(Vcdofmap.dofs(), comm=mycomm)
"""
if mpirank == 0:
    gindices = PETSc.IS().createGeneral([6,7,8])
    #gindices = [6,7,8]
else:
    gindices = PETSc.IS().createGeneral([0,1,2,3,4,5])
    #gindices = [0,1,2,3,4,5]
rmap = PETSc.LGMap().create(gindices, mycomm)
cmap = PETSc.LGMap().create(gindices, mycomm)
print 'rank={}, rmap indices:'.format(mpirank), rmap.getIndices()
print 'rank={}, cmap indices:'.format(mpirank), cmap.getIndices()
"""
#
MPETSc = PETSc.Mat()
Example #22
    def _create_DMPlex(self, dim, coords, cells, elev):
        """
        Create a PETSc DMPlex object from the mesh attributes

        Args:
            dim: mesh dimensions
            coords: mesh coordinates
            cells: cell nodes indices
            elev: nodes elevation
        """

        t0 = clock()
        self.dm = PETSc.DMPlex().createFromCellList(dim,
                                                    cells,
                                                    coords,
                                                    comm=PETSc.COMM_WORLD)
        if MPIrank == 0 and self.verbose:
            print('Create DMPlex (%0.02f seconds)' % (clock() - t0))

        # Create boundary labels
        t0 = clock()
        label = "boundary"
        self._set_DMPlex_boundary_points(label)

        # label coarse DM in case it is ever needed again
        self.dm.createLabel("coarse")
        pStart, pEnd = self.dm.getDepthStratum(0)
        for pt in range(pStart, pEnd):
            self.dm.setLabelValue("coarse", pt, 1)

        # Define one DoF on the nodes
        self.dm.setNumFields(1)
        origSect = self.dm.createSection(1, [1, 0, 0])
        origSect.setFieldName(0, "points")
        origSect.setUp()
        self.dm.setDefaultSection(origSect)
        origVec = self.dm.createGlobalVector()

        # Distribute to other processors if any
        if MPIsize > 1:
            sf = self.dm.distribute(overlap=1)
            newSect, newVec = self.dm.distributeField(sf, origSect, origVec)
            self.dm.setDefaultSection(newSect)
            newSect.destroy()
            newVec.destroy()
            sf.destroy()
        origVec.destroy()
        origSect.destroy()

        self.hGlobal = self.dm.createGlobalVector()
        self.hLocal = self.dm.createLocalVector()
        self.sizes = self.hGlobal.getSizes(), self.hGlobal.getSizes()

        # Local/Global mapping
        self.lgmap_row = self.dm.getLGMap()
        l2g = self.lgmap_row.indices.copy()
        offproc = l2g < 0
        l2g[offproc] = -(l2g[offproc] + 1)
        self.lgmap_col = PETSc.LGMap().create(l2g, comm=PETSc.COMM_WORLD)
        del l2g

        if MPIrank == 0 and self.verbose:
            print('Distribute DMPlex (%0.02f seconds)' % (clock() - t0))

        # Get natural numbering
        t0 = clock()
        coords = MPI.COMM_WORLD.bcast(coords, root=0)
        elev = MPI.COMM_WORLD.bcast(elev, root=0)
        self._naturalNumbering(coords)

        self.hLocal.setArray(elev[self.natural2local])
        self.dm.localToGlobal(self.hLocal, self.hGlobal)
        self.dm.globalToLocal(self.hGlobal, self.hLocal)

        if MPIrank == 0 and self.verbose:
            print('Distribute field to DMPlex (%0.02f seconds)' %
                  (clock() - t0))

        # Forcing event number
        self.rainNb = -1
        self.tecNb = -1

        return
Example #23
 def setUp(self):
     self.idx = self._mk_idx(PETSc.COMM_WORLD)
     self.iset = PETSc.IS().createGeneral(self.idx, comm=PETSc.COMM_WORLD)
     self.lgmap = PETSc.LGMap().create(self.iset)
Example #24
    def _init_block(self):
        self._blocks = [[self]]
        mat = PETSc.Mat()
        row_lg = PETSc.LGMap()
        col_lg = PETSc.LGMap()
        rdim, cdim = self.sparsity.dims
        if MPI.comm.size == 1:
            # The PETSc local to global mapping is the identity in the sequential case
            row_lg.create(
                indices=np.arange(self.sparsity.nrows, dtype=PETSc.IntType),
                bsize=rdim)
            col_lg.create(
                indices=np.arange(self.sparsity.ncols, dtype=PETSc.IntType),
                bsize=cdim)
            self._array = np.zeros(self.sparsity.nz, dtype=PETSc.RealType)
            # We're not currently building a blocked matrix, so need to scale the
            # number of rows and columns by the sparsity dimensions
            # FIXME: This needs to change if we want to do blocked sparse
            # NOTE: using _rowptr and _colidx since we always want the host values
            mat.createAIJWithArrays(
                (self.sparsity.nrows * rdim, self.sparsity.ncols * cdim),
                (self.sparsity._rowptr, self.sparsity._colidx, self._array))
        else:
            # We get the PETSc local to global mapping from the halo.
            # This gives us "block" indices, we need to splat those
            # out to dof indices for vector fields since we don't
            # currently assemble into block matrices.
            rindices = self.sparsity.rmaps[0].toset.halo.global_to_petsc_numbering
            cindices = self.sparsity.cmaps[0].toset.halo.global_to_petsc_numbering
            row_lg.create(indices=rindices, bsize=rdim)
            col_lg.create(indices=cindices, bsize=cdim)

            mat.createAIJ(size=((self.sparsity.nrows * rdim, None),
                                (self.sparsity.ncols * cdim, None)),
                          nnz=(self.sparsity.nnz, self.sparsity.onnz),
                          bsize=(rdim, cdim))
        mat.setBlockSizes(rdim, cdim)
        mat.setLGMap(rmap=row_lg, cmap=col_lg)
        # Do not stash entries destined for other processors, just drop them
        # (we take care of those in the halo)
        mat.setOption(mat.Option.IGNORE_OFF_PROC_ENTRIES, True)
        # Any add or insertion that would generate a new entry that has not
        # been preallocated will raise an error
        mat.setOption(mat.Option.NEW_NONZERO_ALLOCATION_ERR, True)
        # Do not ignore zeros while we fill the initial matrix so that
        # petsc doesn't compress things out.
        mat.setOption(mat.Option.IGNORE_ZERO_ENTRIES, False)
        # When zeroing rows (e.g. for enforcing Dirichlet bcs), keep those in
        # the nonzero structure of the matrix. Otherwise PETSc would compact
        # the sparsity and render our sparsity caching useless.
        mat.setOption(mat.Option.KEEP_NONZERO_PATTERN, True)
        # We completely fill the allocated matrix when zeroing the
        # entries, so raise an error if we "missed" one.
        mat.setOption(mat.Option.UNUSED_NONZERO_LOCATION_ERR, True)

        # Put zeros in all the places we might eventually put a value.
        sparsity.fill_with_zeros(mat, self.sparsity.dims, self.sparsity.maps)

        # Now we've filled up our matrix, so the sparsity is
        # "complete", we can ignore subsequent zero entries.
        mat.setOption(mat.Option.IGNORE_ZERO_ENTRIES, True)
        self._handle = mat
        # Matrices start zeroed.
        self._version_set_zero()
Example #25
import dolfin as dlf
from petsc4py import PETSc

mesh = dlf.UnitSquareMesh(100, 100)
W = dlf.FunctionSpace(mesh, 'CG', 1)

def boundary(x, on_boundary):
    return on_boundary

bc = dlf.DirichletBC(W, dlf.Constant(0.0), boundary)

N = mesh.num_vertices()
v = PETSc.Vec()
v.create()
v.setSizes(N)
v.setType('standard')
v.setValues(range(N), [N]*N)

B_pet = PETSc.Mat()
B_pet.createAIJ([N,N], nnz=N)

lgmap = PETSc.LGMap().create(W.dofmap().dofs())
B_pet.setLGMap(lgmap, lgmap)

B_pet.setDiagonal(v)
B_pet.assemblyBegin()
B_pet.assemblyEnd()

B = dlf.PETScMatrix(B_pet)
bc.apply(B) # error
Example #26
    def lgmap(self):
        """A PETSc LGMap mapping process-local indices to global
        indices for this :class:`MixedDataSet`.
        """
        lgmap = PETSc.LGMap()
        if self.comm.size == 1:
            size = sum(s.size * s.cdim for s in self)
            lgmap.create(indices=np.arange(size, dtype=IntType),
                         bsize=1,
                         comm=self.comm)
            return lgmap
        # Compute local to global maps for a monolithic mixed system
        # from the individual local to global maps for each field.
        # Exposition:
        #
        # We have N fields and P processes.  The global row
        # ordering is:
        #
        # f_0_p_0, f_1_p_0, ..., f_N_p_0; f_0_p_1, ..., ; f_0_p_P,
        # ..., f_N_p_P.
        #
        # We have per-field local to global numberings, to convert
        # these into multi-field local to global numberings, we note
        # the following:
        #
        # For each entry in the per-field l2g map, we first determine
        # the rank that entry belongs to, call this r.
        #
        # We know that this must be offset by:
        # 1. The sum of all field lengths with rank < r
        # 2. The sum of all lower-numbered field lengths on rank r.
        #
        # Finally, we need to shift the field-local entry by the
        # current field offset.
        idx_size = sum(s.total_size * s.cdim for s in self)
        indices = np.full(idx_size, -1, dtype=IntType)
        owned_sz = np.array([sum(s.size * s.cdim for s in self)],
                            dtype=IntType)
        field_offset = np.empty_like(owned_sz)
        self.comm.Scan(owned_sz, field_offset)
        field_offset -= owned_sz

        all_field_offsets = np.empty(self.comm.size, dtype=IntType)
        self.comm.Allgather(field_offset, all_field_offsets)

        start = 0
        all_local_offsets = np.zeros(self.comm.size, dtype=IntType)
        current_offsets = np.zeros(self.comm.size + 1, dtype=IntType)
        for s in self:
            idx = indices[start:start + s.total_size * s.cdim]
            owned_sz[0] = s.size * s.cdim
            self.comm.Scan(owned_sz, field_offset)
            self.comm.Allgather(field_offset, current_offsets[1:])
            # Find the ranks each entry in the l2g belongs to
            l2g = s.halo.local_to_global_numbering
            # If cdim > 1, we need to unroll the node numbering to dof
            # numbering
            if s.cdim > 1:
                new_l2g = np.empty(l2g.shape[0] * s.cdim, dtype=l2g.dtype)
                for i in range(s.cdim):
                    new_l2g[i::s.cdim] = l2g * s.cdim + i
                l2g = new_l2g
            tmp_indices = np.searchsorted(current_offsets, l2g,
                                          side="right") - 1
            idx[:] = l2g[:] - current_offsets[tmp_indices] + \
                all_field_offsets[tmp_indices] + all_local_offsets[tmp_indices]
            self.comm.Allgather(owned_sz, current_offsets[1:])
            all_local_offsets += current_offsets[1:]
            start += s.total_size * s.cdim
        lgmap.create(indices=indices, bsize=1, comm=self.comm)
        return lgmap
Example #27
comm = mesh.mpi_comm().tompi4py()
mat = PETSc.Mat()
mat.create(comm)
# Local, global
sizes = [
    dofmap.index_map().size(IndexMap.MapSize_OWNED),
    dofmap.index_map().size(IndexMap.MapSize_GLOBAL)
]
# Square
mat.setSizes([sizes, sizes])
# Sparse
mat.setType('aij')
mat.setUp()
# Map from local rows to global rows
lgmap = list(map(int, dofmap.tabulate_local_to_global_dofs()))
lgmap = PETSc.LGMap().create(lgmap, comm=comm)
mat.setLGMap(lgmap, lgmap)

# Fill the values
mat.setDiagonal(v)
mat.assemblyBegin()
mat.assemblyEnd()

A = PETScMatrix(mat)
bc.apply(A)

# Check if we can do matvec. Just no crash
x, y = mat.createVecs()
x.setRandom()
y.setRandom()
x, y = PETScVector(x), PETScVector(y)
Example #28
 def scalar_lgmap(self):
     if self.cdim == 1:
         return self.lgmap
     indices = self.lgmap.block_indices
     return PETSc.LGMap().create(indices=indices, bsize=1, comm=self.comm)
Example #29
gfu.vec.data = inv * f.vec
masterprint("ngs-dot =", InnerProduct(gfu.vec, f.vec))

pardofs = fes.ParallelDofs()
globnums, nglob = pardofs.EnumerateGlobally()

locmat = a.mat.local_mat
val, col, ind = locmat.CSR()
ind = np.array(ind, dtype='int32')

apsc_loc = psc.Mat().createAIJ(size=(locmat.height, locmat.width),
                               csr=(ind, col, val),
                               comm=MPI.COMM_SELF)

IS = psc.IS().createBlock(bsize=1, indices=globnums, comm=comm)
lgmap = psc.LGMap().create(bsize=1, indices=globnums, comm=comm)

mat = psc.Mat().createPython(size=nglob, comm=comm)
mat.setType(psc.Mat.Type.IS)
mat.setLGMap(lgmap)
mat.setISLocalMat(apsc_loc)
mat.assemble()

f.vec.Cumulate()

v1, v2 = mat.createVecs()

v2loc = v2.getSubVector(IS)
v2loc.getArray()[:] = f.vec.FV()
v2.restoreSubVector(IS, v2loc)