Example #1
def preprocessingNNZ(mesh_file, out_dir, rank):
    ''' Preprocess sparsity pattern (NNZ) for parallel matrix allocation
    of a given mesh in Gmsh format. Here, dofs are defined for edge
    finite element computations.

    :param str mesh_file: mesh file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: None
    '''

    if rank == 0:
        PETSc.Sys.Print('  Sparsity pattern (nnz.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingNNZ(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Compute dofs
    _, dofsNodes = computeDofs(elemsN, nElems)
    nDofs = dofsNodes.shape[0]

    # Since PETGEM parallelism is based on PETSc, computing the matrix
    # sparsity pattern is critical for performance. Furthermore, PETGEM
    # V1.0 is based on linear edge finite elements, which yield six dofs per
    # tetrahedral element. Hence, the tetrahedral valence is equal to 34.
    # Based on this information we build the NNZ vector.

    # To avoid memory performance issues, pad the valence (roughly 50% over 34)
    valence = 50
    nnz = np.full(nDofs, valence, dtype=np.int64)

    # Build PETSc structures
    vector = createSequentialVectorWithArray(nnz)

    # Delete unnecessary arrays
    del nnz

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'nnz.dat'

    # Write PETGEM NNZ vector in PETSc format
    writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    return
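The nnz vector written by this routine is typically consumed at assembly time to preallocate the system matrix. Below is a minimal petsc4py sketch of that use; the sizes are made up for illustration and this is not PETGEM's actual assembly code:

import numpy as np
from petsc4py import PETSc

nDofs = 1000                 # assumed number of DOFs, for illustration
valence = 50                 # padded valence, as in preprocessingNNZ()
nnz = np.full(nDofs, valence, dtype=PETSc.IntType)

# Preallocate a sequential sparse (AIJ) matrix with the per-row
# nonzero estimate; this avoids costly mallocs during assembly.
A = PETSc.Mat().createAIJ([nDofs, nDofs], nnz=nnz, comm=PETSc.COMM_SELF)
A.setUp()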
Example #2
def preprocessNodes(mesh_file, out_dir, rank):
    ''' Preprocess nodal coordinates of a given mesh in Gmsh format.

    :param str mesh_file: mesh file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: number of nodes
    :rtype: int
    '''

    if rank == 0:
        PETSc.Sys.Print('  Nodal coordinates (nodes.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessNodes(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read nodes
    nodes, nNodes = readGmshNodes(mesh_file)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Build coordinates in PETGEM format, where each row holds the xyz
    # coordinates of the 4 nodes of a tetrahedral element
    numDimensions = 3
    nodalOrder = 4
    data = np.array(nodes[elemsN[:], :], dtype=np.float64)
    data = data.reshape(nElems, numDimensions*nodalOrder)

    # Delete unnecessary arrays
    del nodes
    del elemsN

    # Get matrix dimensions
    size = data.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], data)

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'nodes.dat'

    # Write PETGEM nodes in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    return nNodes
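The fancy-indexing step above turns the nodal coordinate table into one row of 12 values per element (x1 y1 z1 ... x4 y4 z4). A tiny NumPy sketch with toy data shows the layout:

import numpy as np

# Toy data: five nodes and two tetrahedra (assumed values).
nodes = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.],
                  [0., 0., 1.], [1., 1., 1.]])
elemsN = np.array([[0, 1, 2, 3], [1, 2, 3, 4]])

data = nodes[elemsN]            # shape (nElems, 4, 3): 4 nodes x 3 coords
data = data.reshape(len(elemsN), 12)
print(data.shape)               # (2, 12): one row per element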
Example #3
def preprocessingNodalConnectivity(mesh_file, out_dir, rank):
    ''' Preprocess nodal connectivity of a given mesh in Gmsh format.

    :param str mesh_file: mesh file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: number of tetrahedral elements
    :rtype: int
    '''

    if rank == 0:
        PETSc.Sys.Print('  Nodal connectivity (elemsN.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingNodalConnectivity(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Get matrix dimensions
    size = elemsN.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], elemsN)

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'meshConnectivity.dat'

    # Write PETGEM connectivity in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    return nElems
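The helpers createSequentialDenseMatrixWithArray and writeParallelDenseMatrix are PETGEM internals not shown here. Assuming they are thin wrappers over petsc4py's dense matrix and binary viewer, they could look roughly like the following sketch (an assumption, not the library's actual code):

import numpy as np
from petsc4py import PETSc

def createSequentialDenseMatrixWithArray(nRows, nCols, data):
    # Sketch: wrap a NumPy array in a sequential PETSc dense matrix
    array = np.ascontiguousarray(data, dtype=PETSc.ScalarType)
    matrix = PETSc.Mat().createDense([nRows, nCols], array=array,
                                     comm=PETSc.COMM_SELF)
    matrix.assemblyBegin()
    matrix.assemblyEnd()
    return matrix

def writeParallelDenseMatrix(path, matrix, communicator=PETSc.COMM_SELF):
    # Sketch: dump the matrix in PETSc binary format
    viewer = PETSc.Viewer().createBinary(path, mode='w', comm=communicator)
    matrix.view(viewer)
    viewer.destroy()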
Example #4
def preprocessingDataReceivers(mesh_file, receivers_file,
                               out_dir, rank):
    ''' Preprocess receiver positions associated with a given mesh in Gmsh
    format. Here, dofs are defined for edge finite element computations.

    :param str mesh_file: mesh file name to be preprocessed.
    :param str receivers_file: receiver positions file name to be
                               preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: number of receivers.
    :rtype: int
    '''
    from scipy.spatial import Delaunay

    if rank == 0:
        PETSc.Sys.Print('  Receiver positions (receivers.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingDataReceivers(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Check if receivers_file exists
    success = checkFilePath(receivers_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingDataReceivers(): file ' + receivers_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read receivers_file
    receivers = np.loadtxt(receivers_file)

    # Number of receivers
    if receivers.ndim == 1:
        nReceivers = 1
    else:
        dim = receivers.shape
        nReceivers = dim[0]

    # Read nodes
    nodes, nNodes = readGmshNodes(mesh_file)

    # Build Delaunay triangulation with nodes
    tri = Delaunay(nodes)

    # Delete unnecessary arrays
    del nodes

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Compute dofs
    dofs, _ = computeDofs(elemsN, nElems)

    # Overwrite Delaunay structure with mesh_file connectivity
    tri.simplices = elemsN.astype(np.int32)

    # Delete unnecessary arrays
    del elemsN

    # Find out which tetrahedral element points are in
    recvElems = tri.find_simplex(receivers)

    # Determine if all receiver points were found
    idx = np.where(recvElems < 0)[0]

    # If idx is not empty, there are receivers outside the domain
    if idx.size != 0:
        PETSc.Sys.Print('     Some receivers were not located')
        PETSc.Sys.Print('     The following receiver IDs will not be ' +
                        'taken into account: ')
        PETSc.Sys.Print(idx)

        # Update number of receivers
        nReceivers = nReceivers - len(idx)

        if nReceivers == 0:
            PETSc.Sys.Print('     No receiver has been found. Nothing to do.'
                            ' Aborting')
            exit(-1)

        # Remove idx from receivers matrix
        receivers = np.delete(receivers, idx, axis=0)

        # Create new file with located points coordinates
        # Build path to save the file
        out_path = out_dir + 'receiversPETGEM.txt'
        PETSc.Sys.Print('     Saving file with localized receiver positions ' +
                        '(receiversPETGEM.txt)')
        np.savetxt(out_path, receivers, fmt='%1.8e')

    # Allocate space for receivers in PETGEM format
    numDimensions = 3
    nodalOrder = 4
    edgeOrder = 6
    allocate = numDimensions+nodalOrder*numDimensions+nodalOrder+edgeOrder
    tmp = np.zeros((nReceivers, allocate), dtype=np.float64)

    # Fill tmp matrix with receiver positions, element coordinates and
    # nodal indexes
    for iReceiver in np.arange(nReceivers):
        # If there is one receiver
        if nReceivers == 1:
            # Get receiver coordinates
            coordiReceiver = receivers[0:]
            # Get element coordinates (container element)
            coordElement = tri.points[tri.simplices[recvElems, :]]
            coordElement = coordElement.flatten()
            # Get nodal indexes (container element)
            nodesElement = tri.simplices[recvElems, :]
            # Get element-dofs indices (container element)
            dofsElement = dofs[recvElems, :]
        # If there is more than one receiver
        else:
            # Get receiver coordinates
            coordiReceiver = receivers[iReceiver, :]
            # Get element coordinates (container element)
            coordElement = tri.points[tri.simplices[recvElems[iReceiver], :]]
            coordElement = coordElement.flatten()
            # Get nodal indexes (container element)
            nodesElement = tri.simplices[recvElems[iReceiver], :]
            # Get element-dofs indices (container element)
            dofsElement = dofs[recvElems[iReceiver], :]
        # Insert data for iReceiver
        tmp[iReceiver, 0:3] = coordiReceiver
        tmp[iReceiver, 3:15] = coordElement
        tmp[iReceiver, 15:19] = nodesElement
        tmp[iReceiver, 19:] = dofsElement

    # Delete unnecessary arrays
    del tri
    del dofs

    # Get matrix dimensions
    size = tmp.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], tmp)

    # Delete unnecessary arrays
    del tmp

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'receivers.dat'

    # Write PETGEM receivers in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    return nReceivers
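The point-location step above relies on scipy.spatial.Delaunay.find_simplex, which returns -1 for query points outside the triangulation; that is exactly the test used to drop unlocatable receivers. A self-contained sketch with toy data:

import numpy as np
from scipy.spatial import Delaunay

# Toy cloud of mesh nodes (assumed data) and two query points.
rng = np.random.default_rng(0)
nodes = rng.random((30, 3))
tri = Delaunay(nodes)

receivers = np.array([[0.5, 0.5, 0.5],    # likely inside the hull
                      [5.0, 5.0, 5.0]])   # clearly outside

recvElems = tri.find_simplex(receivers)
outside = np.where(recvElems < 0)[0]
print(recvElems, outside)    # -1 marks receivers outside the domain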
Example #5
def preprocessingConductivityModel(mesh_file, material_conductivities,
                                   out_dir, rank):
    ''' Preprocess conductivity model associated with a given mesh in Gmsh
    format. Here, dofs are defined for edge finite element computations.

    :param str mesh_file: mesh file name to be preprocessed.
    :param ndarray material_conductivities: conductivity values
                                            for each material in the mesh.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: None
    '''

    if rank == 0:
        PETSc.Sys.Print('  Conductivity model (conductivityModel.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingConductivityModel(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsS, nElems = readGmshPhysicalGroups(mesh_file)

    # Number of materials
    nMaterials = elemsS.max()

    # Ensure that the number of materials in material_conductivities (user
    # input) matches the number imported from the Gmsh file
    if rank == 0:
        if nMaterials != len(material_conductivities) - 1:
            PETSc.Sys.Print('  The number of materials in ' + mesh_file +
                            ' is not consistent with ' +
                            'Material conductivities array. Aborting')
            exit(-1)

    # Build conductivity arrays
    conductivityModel = np.zeros(nElems, dtype=np.float64)
    for iEle in np.arange(nElems):
        conductivityModel[iEle] = material_conductivities[int(elemsS[iEle])]

    # Build PETSc structures
    vector = createSequentialVectorWithArray(conductivityModel)

    # Delete unnecessary arrays
    del conductivityModel

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'conductivityModel.dat'

    # Write PETGEM conductivity model in PETSc format
    writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    return
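The element loop above is a plain gather: each element's physical-group id indexes into the user-supplied conductivity table. A vectorized NumPy equivalent with toy values (a sketch, assuming zero-based material ids as in the code above):

import numpy as np

# Assumed data: a physical-group id per element, one conductivity per material.
elemsS = np.array([0, 1, 1, 2, 0])
material_conductivities = np.array([1.0, 3.3, 0.01])

conductivityModel = material_conductivities[elemsS.astype(int)]
print(conductivityModel)    # [1.   3.3  3.3  0.01 1.  ]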
Example #6
def preprocessingDOF(mesh_file, out_dir, rank):
    ''' Preprocess degrees of freedom (DOF) and their associated data
    structures of a given mesh in Gmsh format. Here, dofs are defined for
    edge finite element computations.

    :param str mesh_file: mesh file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: number of DOFs.
    :rtype: int
    '''

    if rank == 0:
        PETSc.Sys.Print('  Degrees of freedom (dofs.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingDOF(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Compute dofs
    dofs, dofsNodes = computeDofs(elemsN, nElems)
    nDofs = dofsNodes.shape[0]

    # Compute faces
    elemsF, facesN = computeFaces(elemsN, nElems)

    # Compute boundary faces
    boundaryFacesN = computeBoundaryFaces(elemsF, facesN)

    # Delete unnecessary arrays
    del elemsN
    del elemsF
    del facesN

    # Compute boundary dofs
    boundaryDofs = computeBoundaryDofs(dofsNodes, boundaryFacesN)

    # Delete unnecessary arrays
    del boundaryFacesN

    # ---------- DOFS ----------
    # Get matrix dimensions
    size = dofs.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], dofs)

    # Delete unnecessary arrays
    del dofs

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'dofs.dat'

    # Write PETGEM dofs in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------- DOFS TO NODES ----------
    if rank == 0:
        PETSc.Sys.Print('  Dofs connectivity (dofsNodes.dat)')

    # Get matrix dimensions
    size = dofsNodes.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], dofsNodes)

    # Delete unnecessary arrays
    del dofsNodes

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'dofsNodes.dat'

    # Write PETGEM dofsNodes in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------- BOUNDARY DOFs ----------
    if rank == 0:
        PETSc.Sys.Print('  Boundaries (boundaries.dat)')

    # Build PETSc structures
    vector = createSequentialVectorWithArray(boundaryDofs)

    # Delete unnecessary arrays
    del boundaryDofs

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'boundaries.dat'

    # Write PETGEM boundaries in PETSc format
    writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    return nDofs
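The boundaries.dat vector typically feeds the Dirichlet treatment at assembly or solve time. One common approach is to zero the boundary rows and columns of the system matrix; below is a minimal petsc4py sketch under that assumption (toy sizes, not PETGEM's actual solver code):

import numpy as np
from petsc4py import PETSc

nDofs = 8
A = PETSc.Mat().createAIJ([nDofs, nDofs], nnz=1, comm=PETSc.COMM_SELF)
for i in range(nDofs):
    A.setValue(i, i, 2.0)
A.assemblyBegin()
A.assemblyEnd()

# Assumed boundary DOF indices, as read back from boundaries.dat.
boundaryDofs = np.array([0, 7], dtype=PETSc.IntType)

# Impose homogeneous Dirichlet conditions on those DOFs.
A.zeroRowsColumns(boundaryDofs, diag=1.0)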
Example #7
def postProcessingFields(receivers, modelling, x, Iend_receivers,
                         Istart_receivers, edgeOrder, nodalOrder,
                         numDimensions, rank):
    ''' Compute the CSEM modelling output: primary electric field, secondary
    electric field and total electric field on receivers position.

    :param petsc matrix receivers: data receivers to compute electric fields
    :param object modelling: CSEM modelling with physical parameters.
    :param petsc vector x: solution vector
    :param int Iend_receivers: last range for receivers
    :param int Istart_receivers: init range for receivers
    :param int edgeOrder: order of tetrahedral edge element
    :param int nodalOrder: order of tetrahedral nodal element
    :param int numDimensions: number of dimensions
    :param int rank: MPI rank
    :return: elapsedTimepostprocessing
    :rtype: float
    '''

    # Start timer
    Init_postprocessing = getTime()

    # Number of receivers
    nReceivers = receivers.getSize()[0]
    nReceiversLocal = Iend_receivers-Istart_receivers

    # Print number of receivers per MPI task
    PETSc.Sys.Print('  Number of receivers:', nReceivers)
    PETSc.Sys.syncPrint('    Rank: ', rank, ' is post-processing ',
                        nReceiversLocal, ' receivers')
    PETSc.Sys.syncFlush()

    # Read edge connectivity for receivers
    # Auxiliary arrays
    dataRecv = np.zeros(edgeOrder, dtype=np.float64)
    edgesIdxRecv = np.zeros((nReceiversLocal, edgeOrder), dtype=PETSc.IntType)
    idx = 0
    for iRecv in np.arange(Istart_receivers, Iend_receivers):
        # Get data of iRecv
        temp = np.asarray(receivers.getRow(iRecv))
        dataRecv[:] = np.real(temp[1, 19:25])
        # Edge-indexes for iRecv
        edgesIdxRecv[idx, :] = (dataRecv).astype(PETSc.IntType)
        idx += 1

    # Gather global solution of x to local vector
    # Sequential vector for gather tasks
    x_local = createSequentialVector(edgeOrder*nReceiversLocal,
                                     communicator=None)

    # Build Index set in PETSc format
    IS_edges = PETSc.IS().createGeneral(edgesIdxRecv.flatten(),
                                        comm=PETSc.COMM_WORLD)
    # Build gather vector
    gatherVector = PETSc.Scatter().create(x, IS_edges, x_local, None)
    # Gather values
    gatherVector.scatter(x, x_local, PETSc.InsertMode.INSERT_VALUES,
                         PETSc.ScatterMode.FORWARD)

    # Post-processing electric fields
    # Create parallel structures
    EpX = createParallelVector(nReceivers, communicator=None)
    EpY = createParallelVector(nReceivers, communicator=None)
    EpZ = createParallelVector(nReceivers, communicator=None)
    EsX = createParallelVector(nReceivers, communicator=None)
    EsY = createParallelVector(nReceivers, communicator=None)
    EsZ = createParallelVector(nReceivers, communicator=None)
    EtX = createParallelVector(nReceivers, communicator=None)
    EtY = createParallelVector(nReceivers, communicator=None)
    EtZ = createParallelVector(nReceivers, communicator=None)
    EpDense = createParallelDenseMatrix(nReceivers, numDimensions,
                                        communicator=None)
    EsDense = createParallelDenseMatrix(nReceivers, numDimensions,
                                        communicator=None)
    EtDense = createParallelDenseMatrix(nReceivers, numDimensions,
                                        communicator=None)

    # Reallocate auxiliary array
    dataRecv = np.zeros(numDimensions+nodalOrder*numDimensions+nodalOrder,
                        dtype=np.float64)
    # Compute fields for all local receivers
    idx = 0
    for iRecv in np.arange(Istart_receivers, Iend_receivers):
        # Get data of iRecv
        temp = np.asarray(receivers.getRow(iRecv))
        dataRecv[:] = np.real(temp[1, 0:19])
        # Receivers coordinates
        coordReceiver = dataRecv[0:3]
        # Element coordinates
        coordElement = dataRecv[3:15]
        # Nodal-indexes
        nodesElement = (dataRecv[15:19]).astype(PETSc.IntType)
        # Compute fields
        [EpRecv, EsRecv, EtRecv] = computeFieldsReceiver(modelling,
                                                         coordReceiver,
                                                         coordElement,
                                                         nodesElement,
                                                         x_local[idx *
                                                                 edgeOrder:
                                                                 (idx *
                                                                  edgeOrder) +
                                                                 edgeOrder],
                                                         edgeOrder,
                                                         numDimensions)
        idx += 1
        # Set primary field components
        EpX.setValue(iRecv, EpRecv[0], addv=PETSc.InsertMode.INSERT_VALUES)
        EpY.setValue(iRecv, EpRecv[1], addv=PETSc.InsertMode.INSERT_VALUES)
        EpZ.setValue(iRecv, EpRecv[2], addv=PETSc.InsertMode.INSERT_VALUES)
        EpDense.setValue(iRecv, 0, EpRecv[0],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EpDense.setValue(iRecv, 1, EpRecv[1],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EpDense.setValue(iRecv, 2, EpRecv[2],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        # Set secondary field components
        EsX.setValue(iRecv, EsRecv[0],
                     addv=PETSc.InsertMode.INSERT_VALUES)
        EsY.setValue(iRecv, EsRecv[1],
                     addv=PETSc.InsertMode.INSERT_VALUES)
        EsZ.setValue(iRecv, EsRecv[2],
                     addv=PETSc.InsertMode.INSERT_VALUES)
        EsDense.setValue(iRecv, 0, EsRecv[0],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EsDense.setValue(iRecv, 1, EsRecv[1],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EsDense.setValue(iRecv, 2, EsRecv[2],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        # Set total field components
        EtX.setValue(iRecv, EtRecv[0], addv=PETSc.InsertMode.INSERT_VALUES)
        EtY.setValue(iRecv, EtRecv[1], addv=PETSc.InsertMode.INSERT_VALUES)
        EtZ.setValue(iRecv, EtRecv[2], addv=PETSc.InsertMode.INSERT_VALUES)
        EtDense.setValue(iRecv, 0, EtRecv[0],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EtDense.setValue(iRecv, 1, EtRecv[1],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EtDense.setValue(iRecv, 2, EtRecv[2],
                         addv=PETSc.InsertMode.INSERT_VALUES)

    # Start global vector assembly
    EpX.assemblyBegin(), EpY.assemblyBegin(), EpZ.assemblyBegin()
    EsX.assemblyBegin(), EsY.assemblyBegin(), EsZ.assemblyBegin()
    EtX.assemblyBegin(), EtY.assemblyBegin(), EtZ.assemblyBegin()
    EpDense.assemblyBegin(), EsDense.assemblyBegin(), EtDense.assemblyBegin()
    # End global vector assembly
    EpX.assemblyEnd(), EpY.assemblyEnd(), EpZ.assemblyEnd()
    EsX.assemblyEnd(), EsY.assemblyEnd(), EsZ.assemblyEnd()
    EtX.assemblyEnd(), EtY.assemblyEnd(), EtZ.assemblyEnd()
    EpDense.assemblyEnd(), EsDense.assemblyEnd(), EtDense.assemblyEnd()

    # Verify if directory exists
    MASTER = 0
    if rank == MASTER:
        checkIfDirectoryExist(modelling['DIR_NAME'] + '/Output/Petsc')
        checkIfDirectoryExist(modelling['DIR_NAME'] + '/Output/Ascii')
        checkIfDirectoryExist(modelling['DIR_NAME'] + '/Output/Matlab')

    # Print
    PETSc.Sys.Print('  Saving output:')
    # Export electric fields (petsc format)
    printMessage('    Petsc format', rank)
    # Save primary electric field
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EpX.dat',
                     EpX, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EpY.dat',
                     EpY, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EpZ.dat',
                     EpZ, communicator=None)
    writeDenseMatrix(modelling['DIR_NAME'] + '/Output/Petsc/Ep.dat',
                     EpDense, communicator=None)
    # Save secondary electric field
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EsX.dat',
                     EsX, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EsY.dat',
                     EsY, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EsZ.dat',
                     EsZ, communicator=None)
    writeDenseMatrix(modelling['DIR_NAME'] + '/Output/Petsc/Es.dat',
                     EsDense, communicator=None)
    # Save total electric field
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EtX.dat',
                     EtX, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EtY.dat',
                     EtY, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EtZ.dat',
                     EtZ, communicator=None)
    writeDenseMatrix(modelling['DIR_NAME'] + '/Output/Petsc/Et.dat',
                     EtDense, communicator=None)

    # Export electric fields (Ascii and Matlab format)
    if rank == MASTER:
        # Export electric fields (Ascii format)
        # Save primary electric field
        printMessage('    Ascii format', rank)
        dataEp = exportPetscToAscii(nReceivers,
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EpX.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EpY.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EpZ.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Ascii/Ep.dat')
        # Save secondary electric field
        dataEs = exportPetscToAscii(nReceivers,
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EsX.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EsY.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EsZ.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Ascii/Es.dat')
        # Save total electric field
        dataEt = exportPetscToAscii(nReceivers,
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EtX.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EtY.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EtZ.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Ascii/Et.dat')
        # Export electric fields (Matlab format)
        printMessage('    Matlab format', rank)
        # Save primary electric field
        exportNumpytoMatlab(dataEp, modelling['DIR_NAME'] +
                            '/Output/Matlab/Ep.mat', electricField='Primary')
        # Save secondary electric field
        exportNumpytoMatlab(dataEs, modelling['DIR_NAME'] +
                            '/Output/Matlab/Es.mat', electricField='Secondary')
        # Save total electric field
        exportNumpytoMatlab(dataEt, modelling['DIR_NAME'] +
                            '/Output/Matlab/Et.mat', electricField='Total')
        # Remove temporal files (petsc)
        filesToDelete = [modelling['DIR_NAME'] + '/Output/Petsc/EpX.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EpY.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EpZ.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsX.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsY.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsZ.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtX.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtY.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtZ.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EpX.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EpY.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EpZ.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsX.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsY.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsZ.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtX.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtY.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtZ.dat.info']

        for iFile in np.arange(len(filesToDelete)):
            removeFile(filesToDelete[iFile])

    # End timer
    End_postprocessing = getTime()

    # Elapsed time in assembly
    elapsedTimepostprocessing = End_postprocessing-Init_postprocessing

    return elapsedTimepostprocessing
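The gather above follows the standard PETSc scatter pattern: an index set selects entries of the distributed solution vector, and a Scatter copies them into a local sequential vector on each rank. Stripped of the receiver bookkeeping, the pattern is (a sketch with toy sizes):

import numpy as np
from petsc4py import PETSc

# Distributed vector with known entries (x[i] = i).
x = PETSc.Vec().createMPI(10, comm=PETSc.COMM_WORLD)
rstart, rend = x.getOwnershipRange()
for i in range(rstart, rend):
    x.setValue(i, float(i))
x.assemblyBegin()
x.assemblyEnd()

# Indices this rank wants (assumed: the edge DOFs of its receivers).
edgesIdx = np.array([2, 5, 7], dtype=PETSc.IntType)
IS_edges = PETSc.IS().createGeneral(edgesIdx, comm=PETSc.COMM_WORLD)

# Local destination vector and the scatter itself.
x_local = PETSc.Vec().createSeq(len(edgesIdx))
gather = PETSc.Scatter().create(x, IS_edges, x_local, None)
gather.scatter(x, x_local, PETSc.InsertMode.INSERT_VALUES,
               PETSc.ScatterMode.FORWARD)
print(x_local.getArray())    # [2. 5. 7.]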
Example #8
def preprocessingNNZ(nedelec_order, mesh_file, out_dir, rank):
    ''' Preprocess sparsity pattern (NNZ) for parallel matrix allocation
    of a given mesh in Gmsh format.

    Since PETGEM parallelism is based on PETSc, computing the matrix
    sparsity pattern is critical for performance. Furthermore, PETGEM
    is based on tetrahedral edge finite elements of first, second and third
    order, which produce:

        * 6 DOFs per element in first order discretizations
        * 20 DOFs per element in second order discretizations
        * 45 DOFs per element in third order discretizations

    Hence, the tetrahedral valence is equal to:

        * 34 in first order discretizations
        * 134 in second order discretizations
        * 363 in third order discretizations

    :param int nedelec_order: nedelec element order.
    :param str mesh_file: mesh file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: None
    '''

    if rank == 0:
        PETSc.Sys.Print('  Sparsity pattern (nnz.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingNNZ(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Compute number of edges
    _, edgesNodes = computeEdges(elemsN, nElems, nedelec_order)
    nEdges = edgesNodes.shape[0]

    # Compute number of faces
    elemsF, facesN = computeFaces(elemsN, nElems, nedelec_order)
    nFaces = facesN.shape[0]

    if nedelec_order == 1:  # First order edge element
        # Number of DOFs correspond to the number of edges in the mesh
        nDofs = nEdges
        # In order to avoid memory performance issues, add 20% to valence
        valence = 41
    elif nedelec_order == 2:  # Second order edge element
        # Number of DOFs
        nDofs = nEdges * 2 + nFaces * 2
        # In order to avoid memory performance issues, add 20% to valence
        valence = 161
    elif nedelec_order == 3:  # Third order edge element
        # Number of DOFs
        nDofs = nEdges * 3 + nFaces * 6 + nElems * 3
        # In order to avoid memory performance issues, add 20% to valence
        valence = 436
    else:
        raise ValueError('Edge element order=' + str(nedelec_order) +
                         ' not supported.')

    # Build nnz pattern for each row
    nnz = np.full(nDofs, valence, dtype=np.int64)

    # Build PETSc structures
    vector = createSequentialVectorWithArray(nnz)

    # Delete unnecessary arrays
    del nnz

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'nnz.dat'

    # Write PETGEM NNZ vector in PETSc format
    writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    return
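The DOF counts and padded valences above follow directly from the per-entity DOF distribution of first- to third-order Nedelec elements. A small sketch reproducing the arithmetic (20% padding, rounded up), with a hypothetical helper name:

import math

def nnzParameters(nedelec_order, nEdges, nFaces, nElems):
    # (DOFs per edge, per face, per cell, base tetrahedral valence)
    table = {1: (1, 0, 0, 34), 2: (2, 2, 0, 134), 3: (3, 6, 3, 363)}
    perEdge, perFace, perCell, base = table[nedelec_order]
    nDofs = nEdges * perEdge + nFaces * perFace + nElems * perCell
    valence = math.ceil(base * 1.2)    # 41, 161 and 436 respectively
    return nDofs, valence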
Example #9
def preprocessingFaces(nedelec_order, mesh_file, out_dir, rank):
    ''' Preprocess faces, face boundaries and their associated data
    structures of a given mesh in Gmsh format.

    :param int nedelec_order: nedelec element order.
    :param str mesh_file: mesh file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: number of faces.
    :rtype: int
    '''

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingFaces(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Compute faces
    elemsF, facesN = computeFaces(elemsN, nElems, nedelec_order)

    # Number of faces
    size = facesN.shape
    nFaces = size[0]

    # Delete unnecessary arrays
    del elemsN

    # ---------- Export Faces connectivity ----------
    if rank == 0:
        PETSc.Sys.Print('  Faces connectivity (faces.dat)')

    # Get matrix dimensions
    size = elemsF.shape
    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], elemsF)

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'faces.dat'

    # Write PETGEM faces in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------- Export Faces-nodes connectivity ----------
    if rank == 0:
        PETSc.Sys.Print('  Faces-nodes connectivity (facesN.dat)')

    # Build facesN connectivity for each element
    nNodes_face = 3
    nFaces_element = 4
    # Allocate matrix with an additional position to store the total number of
    # faces in the mesh
    facesN_tmp = np.zeros((nElems, nNodes_face * nFaces_element + 1))

    for iEle in np.arange(nElems):
        facesElement = elemsF[iEle, :]
        nodesFace = facesN[facesElement, :]
        facesN_tmp[iEle, 0] = nFaces
        facesN_tmp[iEle, 1:] = nodesFace.reshape(1,
                                                 nNodes_face * nFaces_element)

    # Get matrix dimensions
    size = facesN_tmp.shape
    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], facesN_tmp)

    # Delete unnecessary arrays
    del elemsF
    del facesN
    del facesN_tmp

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'facesNodes.dat'

    # Write PETGEM facesNodes in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    return nFaces
Example #10
def preprocessingEdges(nedelec_order, mesh_file, out_dir, rank):
    ''' Preprocess edges, edge boundaries and their associated data
    structures of a given mesh in Gmsh format. For first-order edge finite
    elements, the edges are the dofs. For higher-order edge finite elements,
    the dofs are computed at runtime based on the edges and faces of each
    tetrahedral element.

    :param int nedelec_order: nedelec element order.
    :param str mesh_file: mesh file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: number of edges and number of dofs.
    :rtype: tuple
    '''

    # ---------- Export Edges ----------
    if rank == 0:
        PETSc.Sys.Print('  Edges (edges.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingEdges(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Compute edges
    elemsE, edgesNodes = computeEdges(elemsN, nElems, nedelec_order)
    nEdges = edgesNodes.shape[0]

    # Compute boundaries
    boundaries, nDofs = computeBoundaries(elemsN, nElems, edgesNodes,
                                          nedelec_order)

    # ---------- Export Edges ----------
    # Get matrix dimensions
    size = elemsE.shape
    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], elemsE)

    # Delete unnecessary arrays
    del elemsE

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'edges.dat'

    # Write PETGEM edges in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------- Export Edges to nodes ----------
    if rank == 0:
        PETSc.Sys.Print('  Edges connectivity (edgesNodes.dat)')

    # Get matrix dimensions
    size = edgesNodes.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], edgesNodes)

    # Delete unnecessary arrays
    del edgesNodes

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'edgesNodes.dat'

    # Write PETGEM edgesNodes in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------- Export boundaries ----------
    if rank == 0:
        PETSc.Sys.Print('  Boundaries (boundaries.dat)')

    # Build PETSc structures
    vector = createSequentialVectorWithArray(boundaries)

    # Delete unnecessary arrays
    del boundaries

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'boundaries.dat'

    # Write PETGEM boundaries in PETSc format
    writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    return nEdges, nDofs