Code example #1
def solveSystem(A, b, x, rank):
    ''' Solve a matrix system of the form Ax = b in parallel.

    :param petsc matrix A: sparse and complex coefficients matrix in
                           petsc format
    :param petsc vector b: parallel right hand side
    :param petsc vector x: parallel solution vector
    :param int rank: MPI rank
    :return: solution of the equation system, solver iteration count and
             elapsed time
    :rtype: petsc vector, int and float
    '''

    PETSc.Sys.syncPrint('  Rank: ', rank, ' is solving system')
    PETSc.Sys.syncFlush()

    # Start timer
    Init_solver = getTime()

    # Create KSP: linear equation solver
    ksp = PETSc.KSP().create(comm=PETSc.COMM_WORLD)
    ksp.setOperators(A)
    ksp.setFromOptions()
    ksp.solve(b, x)
    iterationNumber = ksp.getIterationNumber()
    ksp.destroy()

    # End timer
    End_solver = getTime()

    # Elapsed time in solver
    elapsedTimeSolver = End_solver - Init_solver

    return x, iterationNumber, elapsedTimeSolver
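
A minimal usage sketch (illustrative, not petgem code; it assumes solveSystem and its getTime helper are importable from the module above). Because solveSystem calls setFromOptions, the Krylov method and preconditioner can be chosen at launch time, e.g. mpirun -n 4 python driver.py -ksp_type gmres -pc_type jacobi:

import sys
import petsc4py
petsc4py.init(sys.argv)  # pick up -ksp_type / -pc_type options
from petsc4py import PETSc

# Assemble a trivial diagonal test system distributed over COMM_WORLD
n = 100
A = PETSc.Mat().createAIJ([n, n], comm=PETSc.COMM_WORLD)
A.setUp()
rstart, rend = A.getOwnershipRange()
for i in range(rstart, rend):
    A.setValue(i, i, 2.0)
A.assemblyBegin()
A.assemblyEnd()

b = A.createVecLeft()
b.set(1.0)
x = A.createVecRight()

rank = PETSc.COMM_WORLD.getRank()
x, iterations, elapsed = solveSystem(A, b, x, rank)
PETSc.Sys.Print('Iterations:', iterations, ' Elapsed time:', elapsed)
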
Code example #2
def parallelAssembler(modelling, A, b, nodes, elemsE, elemsN, elemsSigma,
                      Istart_elemsE, Iend_elemsE, rank):
    ''' Assemble matrix A and vector b for 3D CSEM in parallel.

    :param dictionary modelling: CSEM modelling with physical parameters
    :param petsc matrix A: left-hand side
    :param petsc vector b: right-hand side
    :param petsc matrix nodes: nodal coordinates
    :param petsc matrix elemsE: elements-edges connectivity
    :param petsc matrix elemsN: elements-nodes connectivity
    :param petsc vector elemsSigma: elements-conductivity array
    :param int Istart_elemsE: first element index of the local assembly range
    :param int Iend_elemsE: last element index (exclusive) of the local
                            assembly range
    :param int rank: MPI rank
    :return: assembled matrix A, assembled vector b and elapsed assembly time
    :rtype: petsc matrix, petsc vector and float
    '''
    # Print information of assembly
    PETSc.Sys.syncPrint('  Rank: ', rank, ' is assembling ',
                        Iend_elemsE-Istart_elemsE, ' elements')
    PETSc.Sys.syncFlush()

    # Start timer
    Init_assembly = getTime()

    # Compute contributions for all local elements
    for iEle in np.arange(Istart_elemsE, Iend_elemsE):
        # Get coordinates of iEle
        coordEle = nodes.getRow(iEle)[1].real
        # Get edges of iEle
        edgesEle = (elemsE.getRow(iEle)[1].real).astype(PETSc.IntType)
        # Get nodal indexes of iEle
        nodesEle = elemsN.getRow(iEle)[1].real
        # Get sigma of iEle
        sigmaEle = elemsSigma.getValue(iEle).real
        # Compute elemental contributions for iEle
        # Elemental matrix (Ae) and elemental vector (be)
        [Ae, be] = computeElementalContributionsMPI(modelling, coordEle,
                                                    nodesEle, sigmaEle)
        # Add local contributions to global matrix
        A.setValues(edgesEle, edgesEle, Ae, addv=PETSc.InsertMode.ADD_VALUES)
        # Add local contributions to global vector
        b.setValues(edgesEle, be, addv=PETSc.InsertMode.ADD_VALUES)

    # Start global system assembly
    A.assemblyBegin()
    b.assemblyBegin()
    # End global system assembly
    A.assemblyEnd()
    b.assemblyEnd()

    # End timer
    End_assembly = getTime()

    # Elapsed time in assembly
    elapsedTimeAssembly = End_assembly-Init_assembly

    return A, b, elapsedTimeAssembly
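
The key PETSc idiom above is that every rank may call setValues on rows it does not own: contributions are cached locally and exchanged during assemblyBegin/assemblyEnd, and ADD_VALUES sums overlapping entries coming from different elements (and different ranks). A self-contained toy sketch of that behaviour (illustrative, not petgem code):

import numpy as np
from petsc4py import PETSc

comm = PETSc.COMM_WORLD
A = PETSc.Mat().createAIJ([4, 4], comm=comm)
A.setUp()

# Every rank adds the same 2x2 elemental block; after assembly each
# touched entry holds one contribution per rank
idx = np.array([1, 2], dtype=PETSc.IntType)
Ae = np.ones((2, 2), dtype=PETSc.ScalarType)
A.setValues(idx, idx, Ae, addv=PETSc.InsertMode.ADD_VALUES)

# Off-process contributions are communicated here, exactly as in
# parallelAssembler above
A.assemblyBegin()
A.assemblyEnd()
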
Code example #3
def setBoundaryConditions(A, b, boundaries, Istart_boundaries, Iend_boundaries,
                          rank):
    ''' Given a parallel matrix and a parallel vector, set Dirichlet boundary
    conditions.

    :param petsc matrix A: sparse and complex coefficients matrix in
                           petsc format
    :param petsc vector b: parallel right hand side
    :param petsc vector boundaries: array of boundary indexes
    :param int Istart_boundaries: first index of the local boundaries range
    :param int Iend_boundaries: last index (exclusive) of the local
                                boundaries range
    :param int rank: MPI rank
    :return: equation system after applying Dirichlet boundary conditions
             and elapsed time
    :rtype: petsc matrix, petsc vector and float.
    '''

    PETSc.Sys.syncPrint('  Rank: ', rank, ' is setting boundary conditions')
    PETSc.Sys.syncFlush()

    # Start timer
    Init_boundaries = getTime()

    # Boundaries for LHS
    A.zeroRowsColumns(np.real(boundaries).astype(PETSc.IntType))
    # Boundaries for RHS
    numLocalBoundaries = Iend_boundaries - Istart_boundaries
    b.setValues(np.real(boundaries).astype(PETSc.IntType),
                np.zeros(numLocalBoundaries, dtype=np.complex128),
                addv=PETSc.InsertMode.INSERT_VALUES)

    # Start global system assembly
    A.assemblyBegin()
    b.assemblyBegin()
    # End global system assembly
    A.assemblyEnd()
    b.assemblyEnd()

    # End timer
    End_boundaries = getTime()

    # Elapsed time in boundary conditions
    elapsedTimeBoundaries = End_boundaries - Init_boundaries

    return A, b, elapsedTimeBoundaries
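
Note that A.zeroRowsColumns zeroes the selected rows and columns but, by default, writes 1.0 on the diagonal; together with the zeroed right-hand-side entries above, each constrained equation reduces to x[i] = b[i] = 0, i.e. homogeneous Dirichlet conditions. A small illustrative sketch (not petgem code):

import numpy as np
from petsc4py import PETSc

comm = PETSc.COMM_WORLD
A = PETSc.Mat().createAIJ([4, 4], comm=comm)
A.setUp()
rstart, rend = A.getOwnershipRange()
for i in range(rstart, rend):
    A.setValue(i, i, 5.0)
A.assemblyBegin()
A.assemblyEnd()

# Zero row/column 0; the diagonal entry becomes diag (1.0 by default),
# so the first equation reduces to x[0] = b[0]
bc = np.array([0], dtype=PETSc.IntType)
A.zeroRowsColumns(bc, diag=1.0)
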
Code example #4
def postProcessingFields(receivers, modelling, x, Iend_receivers,
                         Istart_receivers, edgeOrder, nodalOrder,
                         numDimensions, rank):
    ''' Compute the CSEM modelling output: primary electric field, secondary
    electric field and total electric field at the receiver positions.

    :param petsc matrix receivers: data receivers to compute electric fields
    :param dictionary modelling: CSEM modelling with physical parameters.
    :param petsc vector x: solution vector
    :param int Iend_receivers: last receiver index (exclusive) of the local
                               range
    :param int Istart_receivers: first receiver index of the local range
    :param int edgeOrder: order of tetrahedral edge element
    :param int nodalOrder: order of tetrahedral nodal element
    :param int numDimensions: number of dimensions
    :param int rank: MPI rank
    :return: elapsedTimepostprocessing
    :rtype: float
    '''

    # Start timer
    Init_postprocessing = getTime()

    # Number of receivers
    nReceivers = receivers.getSize()[0]
    nReceiversLocal = Iend_receivers-Istart_receivers

    # Print number of receivers per MPI task
    PETSc.Sys.Print('  Number of receivers:', nReceivers)
    PETSc.Sys.syncPrint('    Rank: ', rank, ' is post-processing ',
                        nReceiversLocal, ' receivers')
    PETSc.Sys.syncFlush()

    # Read edges-connectivity for receivers
    # Auxiliary arrays
    dataRecv = np.zeros(edgeOrder, dtype=np.float64)
    edgesIdxRecv = np.zeros((nReceiversLocal, edgeOrder), dtype=PETSc.IntType)
    idx = 0
    for iRecv in np.arange(Istart_receivers, Iend_receivers):
        # Get data of iRecv
        temp = np.asarray(receivers.getRow(iRecv))
        dataRecv[:] = np.real(temp[1, 19:25])
        # Edge-indexes for iRecv
        edgesIdxRecv[idx, :] = (dataRecv).astype(PETSc.IntType)
        idx += 1

    # Gather global solution of x to local vector
    # Sequential vector for gather tasks
    x_local = createSequentialVector(edgeOrder*nReceiversLocal,
                                     communicator=None)

    # Build Index set in PETSc format
    IS_edges = PETSc.IS().createGeneral(edgesIdxRecv.flatten(),
                                        comm=PETSc.COMM_WORLD)
    # Build scatter context for the gather
    gatherVector = PETSc.Scatter().create(x, IS_edges, x_local, None)
    # Gather values
    gatherVector.scatter(x, x_local, PETSc.InsertMode.INSERT_VALUES,
                         PETSc.ScatterMode.FORWARD)

    # Post-processing electric fields
    # Create parallel structures
    EpX = createParallelVector(nReceivers, communicator=None)
    EpY = createParallelVector(nReceivers, communicator=None)
    EpZ = createParallelVector(nReceivers, communicator=None)
    EsX = createParallelVector(nReceivers, communicator=None)
    EsY = createParallelVector(nReceivers, communicator=None)
    EsZ = createParallelVector(nReceivers, communicator=None)
    EtX = createParallelVector(nReceivers, communicator=None)
    EtY = createParallelVector(nReceivers, communicator=None)
    EtZ = createParallelVector(nReceivers, communicator=None)
    EpDense = createParallelDenseMatrix(nReceivers, numDimensions,
                                        communicator=None)
    EsDense = createParallelDenseMatrix(nReceivers, numDimensions,
                                        communicator=None)
    EtDense = createParallelDenseMatrix(nReceivers, numDimensions,
                                        communicator=None)

    # Reshape auxiliary array
    dataRecv = np.zeros(numDimensions+nodalOrder*numDimensions+nodalOrder,
                        dtype=np.float64)
    # Compute fields for all local receivers
    idx = 0
    for iRecv in np.arange(Istart_receivers, Iend_receivers):
        # Get data of iRecv
        temp = np.asarray(receivers.getRow(iRecv))
        dataRecv[:] = np.real(temp[1, 0:19])
        # Receivers coordinates
        coordReceiver = dataRecv[0:3]
        # Element coordinates
        coordElement = dataRecv[3:15]
        # Nodal-indexes
        nodesElement = (dataRecv[15:19]).astype(PETSc.IntType)
        # Compute fields
        # Slice of the gathered solution belonging to iRecv
        xLocalRecv = x_local[idx*edgeOrder:(idx + 1)*edgeOrder]
        [EpRecv, EsRecv, EtRecv] = computeFieldsReceiver(modelling,
                                                         coordReceiver,
                                                         coordElement,
                                                         nodesElement,
                                                         xLocalRecv,
                                                         edgeOrder,
                                                         numDimensions)
        idx += 1
        # Set primary field components
        EpX.setValue(iRecv, EpRecv[0], addv=PETSc.InsertMode.INSERT_VALUES)
        EpY.setValue(iRecv, EpRecv[1], addv=PETSc.InsertMode.INSERT_VALUES)
        EpZ.setValue(iRecv, EpRecv[2], addv=PETSc.InsertMode.INSERT_VALUES)
        EpDense.setValue(iRecv, 0, EpRecv[0],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EpDense.setValue(iRecv, 1, EpRecv[1],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EpDense.setValue(iRecv, 2, EpRecv[2],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        # Set secondary field components
        EsX.setValue(iRecv, EsRecv[0],
                     addv=PETSc.InsertMode.INSERT_VALUES)
        EsY.setValue(iRecv, EsRecv[1],
                     addv=PETSc.InsertMode.INSERT_VALUES)
        EsZ.setValue(iRecv, EsRecv[2],
                     addv=PETSc.InsertMode.INSERT_VALUES)
        EsDense.setValue(iRecv, 0, EsRecv[0],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EsDense.setValue(iRecv, 1, EsRecv[1],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EsDense.setValue(iRecv, 2, EsRecv[2],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        # Set total field components
        EtX.setValue(iRecv, EtRecv[0], addv=PETSc.InsertMode.INSERT_VALUES)
        EtY.setValue(iRecv, EtRecv[1], addv=PETSc.InsertMode.INSERT_VALUES)
        EtZ.setValue(iRecv, EtRecv[2], addv=PETSc.InsertMode.INSERT_VALUES)
        EtDense.setValue(iRecv, 0, EtRecv[0],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EtDense.setValue(iRecv, 1, EtRecv[1],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EtDense.setValue(iRecv, 2, EtRecv[2],
                         addv=PETSc.InsertMode.INSERT_VALUES)

    # Start global vector assembly
    EpX.assemblyBegin(), EpY.assemblyBegin(), EpZ.assemblyBegin()
    EsX.assemblyBegin(), EsY.assemblyBegin(), EsZ.assemblyBegin()
    EtX.assemblyBegin(), EtY.assemblyBegin(), EtZ.assemblyBegin()
    EpDense.assemblyBegin(), EsDense.assemblyBegin(), EtDense.assemblyBegin()
    # End global vector assembly
    EpX.assemblyEnd(), EpY.assemblyEnd(), EpZ.assemblyEnd()
    EsX.assemblyEnd(), EsY.assemblyEnd(), EsZ.assemblyEnd()
    EtX.assemblyEnd(), EtY.assemblyEnd(), EtZ.assemblyEnd()
    EpDense.assemblyEnd(), EsDense.assemblyEnd(), EtDense.assemblyEnd()

    # Verify if directory exists
    MASTER = 0
    if rank == MASTER:
        checkIfDirectoryExist(modelling['DIR_NAME'] + '/Output/Petsc')
        checkIfDirectoryExist(modelling['DIR_NAME'] + '/Output/Ascii')
        checkIfDirectoryExist(modelling['DIR_NAME'] + '/Output/Matlab')

    # Print
    PETSc.Sys.Print('  Saving output:')
    # Export electric fields (petsc format)
    printMessage('    Petsc format', rank)
    # Save primary electric field
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EpX.dat',
                     EpX, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EpY.dat',
                     EpY, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EpZ.dat',
                     EpZ, communicator=None)
    writeDenseMatrix(modelling['DIR_NAME'] + '/Output/Petsc/Ep.dat',
                     EpDense, communicator=None)
    # Save secondary electric field
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EsX.dat',
                     EsX, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EsY.dat',
                     EsY, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EsZ.dat',
                     EsZ, communicator=None)
    writeDenseMatrix(modelling['DIR_NAME'] + '/Output/Petsc/Es.dat',
                     EsDense, communicator=None)
    # Save total electric field
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EtX.dat',
                     EtX, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EtY.dat',
                     EtY, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EtZ.dat',
                     EtZ, communicator=None)
    writeDenseMatrix(modelling['DIR_NAME'] + '/Output/Petsc/Et.dat',
                     EtDense, communicator=None)

    # Export electric fields (Ascii and Matlab format)
    if rank == MASTER:
        # Export electric fields (Ascii format)
        # Save primary electric field
        printMessage('    Ascii format', rank)
        dataEp = exportPetscToAscii(nReceivers,
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EpX.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EpY.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EpZ.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Ascii/Ep.dat')
        # Save secondary electric field
        dataEs = exportPetscToAscii(nReceivers,
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EsX.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EsY.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EsZ.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Ascii/Es.dat')
        # Save total electric field
        dataEt = exportPetscToAscii(nReceivers,
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EtX.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EtY.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EtZ.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Ascii/Et.dat')
        # Export electric fields (Matlab format)
        printMessage('    Matlab format', rank)
        # Save primary electric field
        exportNumpytoMatlab(dataEp, modelling['DIR_NAME'] +
                            '/Output/Matlab/Ep.mat', electricField='Primary')
        # Save secondary electric field
        exportNumpytoMatlab(dataEs, modelling['DIR_NAME'] +
                            '/Output/Matlab/Es.mat', electricField='Secondary')
        # Save total electric field
        exportNumpytoMatlab(dataEt, modelling['DIR_NAME'] +
                            '/Output/Matlab/Et.mat', electricField='Total')
        # Remove temporal files (petsc)
        filesToDelete = [modelling['DIR_NAME'] + '/Output/Petsc/EpX.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EpY.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EpZ.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsX.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsY.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsZ.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtX.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtY.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtZ.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EpX.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EpY.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EpZ.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsX.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsY.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsZ.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtX.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtY.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtZ.dat.info']

        for iFile in np.arange(len(filesToDelete)):
            removeFile(filesToDelete[iFile])

    # End timer
    End_postprocessing = getTime()

    # Elapsed time in post-processing
    elapsedTimepostprocessing = End_postprocessing-Init_postprocessing

    return elapsedTimepostprocessing
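
The gather step above is the standard PETSc index-set/scatter pattern: an IS lists the global entries each rank needs, and a Scatter copies them from the distributed vector into a rank-local sequential vector. A stripped-down illustration (not petgem code):

import numpy as np
from petsc4py import PETSc

comm = PETSc.COMM_WORLD
n = 8
x = PETSc.Vec().createMPI(n, comm=comm)
rstart, rend = x.getOwnershipRange()
for i in range(rstart, rend):
    x.setValue(i, float(i))
x.assemblyBegin()
x.assemblyEnd()

# Each rank pulls global entries 1, 5 and 6 into its own sequential
# vector, mirroring the IS + Scatter usage in postProcessingFields
wanted = np.array([1, 5, 6], dtype=PETSc.IntType)
IS_wanted = PETSc.IS().createGeneral(wanted, comm=comm)
x_local = PETSc.Vec().createSeq(len(wanted), comm=PETSc.COMM_SELF)
gather = PETSc.Scatter().create(x, IS_wanted, x_local, None)
gather.scatter(x, x_local, PETSc.InsertMode.INSERT_VALUES,
               PETSc.ScatterMode.FORWARD)
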
Code example #5
File: parallel.py, Project: Kevin2599/petgem
def parallelAssembler(modelling, A, b, nodes, elemsE, elemsN, elemsF, facesN,
                      elemsSigma, Istart_elemsE, Iend_elemsE, nEdges, nFaces,
                      rank):
    ''' Assemble matrix A and vector b for 3D CSEM in parallel.

    :param dictionary modelling: CSEM modelling with physical parameters.
    :param petsc matrix A: left-hand side.
    :param petsc vector b: right-hand side.
    :param petsc matrix nodes: nodal coordinates.
    :param petsc matrix elemsE: elements-edges connectivity.
    :param petsc matrix elemsN: elements-nodes connectivity.
    :param petsc matrix elemsF: elements-faces connectivity.
    :param petsc matrix facesN: faces-nodes connectivity.
    :param petsc vector elemsSigma: elements-conductivity array.
    :param int Istart_elemsE: first element index of the local assembly
                              range.
    :param int Iend_elemsE: last element index (exclusive) of the local
                            assembly range.
    :param int nEdges: total number of edges in the mesh.
    :param int nFaces: total number of faces in the mesh.
    :param int rank: MPI rank.
    :return: assembled matrix A, assembled vector b and elapsed assembly
             time.
    :rtype: petsc matrix, petsc vector and float.
    '''
    # Print information of assembly
    PETSc.Sys.syncPrint('  Rank: ', rank, ' is assembling ',
                        Iend_elemsE - Istart_elemsE, ' elements')
    PETSc.Sys.syncFlush()

    # Get order of edge elements
    nedelec_order = modelling['NEDELEC_ORDER']

    if nedelec_order == 1:  # First order edge element

        # Start timer
        Init_assembly = getTime()

        # Compute contributions for all local elements
        for iEle in np.arange(Istart_elemsE, Iend_elemsE):
            # Get coordinates of iEle
            coordEle = nodes.getRow(iEle)[1].real
            # Get nodal indexes of iEle
            nodesEle = (elemsN.getRow(iEle)[1].real).astype(PETSc.IntType)
            # Get sigma of iEle
            sigmaEle = elemsSigma.getValue(iEle).real
            # Get dofs of iEle
            dofsEle = (elemsE.getRow(iEle)[1].real).astype(PETSc.IntType)
            # Compute elemental contributions for iEle
            # Elemental matrix (Ae) and elemental vector (be)
            [Ae, be] = computeElementalContributionsMPI_FirstOrder(
                modelling, coordEle, nodesEle, sigmaEle)
            # Add local contributions to global matrix
            A.setValues(dofsEle, dofsEle, Ae, addv=PETSc.InsertMode.ADD_VALUES)
            # Add local contributions to global vector
            b.setValues(dofsEle, be, addv=PETSc.InsertMode.ADD_VALUES)

    # Second or third order edge element
    elif nedelec_order == 2 or nedelec_order == 3:

        # Start timer
        Init_assembly = getTime()

        # Compute contributions for all local elements
        for iEle in np.arange(Istart_elemsE, Iend_elemsE):
            # Get coordinates of iEle
            coordEle = nodes.getRow(iEle)[1].real
            # Get edges indexes of iEle
            edgesEle = (elemsE.getRow(iEle)[1].real).astype(PETSc.IntType)
            # Get nodal indexes of iEle
            nodesEle = (elemsN.getRow(iEle)[1].real).astype(PETSc.IntType)
            # Get faces indexes of iEle
            facesEle = (elemsF.getRow(iEle)[1].real).astype(PETSc.IntType)
            # Get nodes indexes for each face
            nodesFaceEle = (facesN.getRow(iEle)[1].real).astype(PETSc.IntType)
            nodesFaceEle = np.reshape(np.delete(nodesFaceEle, 0), (4, 3))
            # Get sigma of iEle
            sigmaEle = elemsSigma.getValue(iEle).real

            # Compute dofs of iEle
            dofsEle = computeElementDOFs(iEle, nodesEle, edgesEle, facesEle,
                                         nodesFaceEle, nEdges, nFaces,
                                         nedelec_order)
            dofsEle = dofsEle.astype(PETSc.IntType)
            # Compute elemental contributions for iEle
            # Elemental matrix (Ae) and elemental vector (be)
            [Ae, be] = computeElementalContributionsMPI_HighOrder(
                modelling, coordEle, nodesEle, sigmaEle, nedelec_order)
            # Add local contributions to global matrix
            A.setValues(dofsEle, dofsEle, Ae, addv=PETSc.InsertMode.ADD_VALUES)
            # Add local contributions to global vector
            b.setValues(dofsEle, be, addv=PETSc.InsertMode.ADD_VALUES)

    else:
        raise ValueError('Unsupported NEDELEC_ORDER: ' +
                         str(nedelec_order))

    # Start global system assembly
    A.assemblyBegin()
    b.assemblyBegin()
    # End global system assembly
    A.assemblyEnd()
    b.assemblyEnd()

    # End timer
    End_assembly = getTime()

    # Elapsed time in assembly
    elapsedTimeAssembly = End_assembly - Init_assembly

    return A, b, elapsedTimeAssembly
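
For context, first-family Nédélec elements on tetrahedra have p(p+2)(p+3)/2 degrees of freedom per element, i.e. 6, 20 and 45 for the three orders this assembler supports: first order places one DOF per edge, while higher orders add further edge, face (and, at third order, interior) DOFs, which is why the second branch also needs the faces connectivity and the nEdges/nFaces offsets. A hedged helper capturing that count (illustrative, not petgem code):

def dofsPerElement(nedelec_order):
    # DOFs per tetrahedron for the first Nedelec family:
    # p = 1 -> 6, p = 2 -> 20, p = 3 -> 45
    p = nedelec_order
    return p * (p + 2) * (p + 3) // 2
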