Example #1
def preprocessingNNZ(mesh_file, out_dir, rank):
    ''' Preprocess sparsity pattern (NNZ) for parallel matrix allocation
    of a given mesh in Gmsh format. Here, dofs are defined for edge
    finite element computations.

    :param str mesh_file: mesh file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: None
    '''

    if rank == 0:
        PETSc.Sys.Print('  Sparsity pattern (nnz.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingNNZ(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Compute dofs
    _, dofsNodes = computeDofs(elemsN, nElems)
    nDofs = dofsNodes.shape[0]

    # Since PETGEM parallelism is based on PETSc, computing the matrix
    # sparsity pattern is critical for performance. Furthermore, PETGEM
    # V1.0 is based on linear edge finite elements, which produce six dofs per
    # tetrahedral element. Hence, the tetrahedral valence is equal to 34.
    # Based on this information we build the NNZ vector.

    # In order to avoid memory performance issues, pad the valence by
    # roughly 40% (rounded up to 50)
    valence = 50
    nnz = np.full(nDofs, valence, dtype=np.int64)

    # Build PETSc structures
    vector = createSequentialVectorWithArray(nnz)

    # Delete unnecessary arrays
    del nnz

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'nnz.dat'

    # Write PETGEM nnz vector in PETSc format
    writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    return
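
A minimal driver sketch for the routine above, assuming petsc4py is available; the mesh and output paths are placeholders (note that out_dir is concatenated directly with the file name, so it should end with a path separator):

# Hypothetical driver; 'model/mesh.msh' and 'out/' are placeholder paths.
import sys
import petsc4py
petsc4py.init(sys.argv)
from petsc4py import PETSc

rank = PETSc.COMM_WORLD.getRank()
preprocessingNNZ('model/mesh.msh', 'out/', rank)   # writes out/nnz.dat
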
Example #2
def preprocessingConductivityModel(mesh_file, material_conductivities,
                                   out_dir, rank):
    ''' Preprocess the conductivity model associated with a given mesh in Gmsh
    format. Here, dofs are defined for edge finite element computations.

    :param str mesh_file: mesh file name to be preprocessed.
    :param ndarray material_conductivities: conductivity values
                                            for each material in the mesh.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: None
    '''

    if rank == 0:
        PETSc.Sys.Print('  Conductivity model (conductivityModel.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingConductivityModel(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsS, nElems = readGmshPhysicalGroups(mesh_file)

    # Number of materials
    nMaterials = elemsS.max()

    # Ensure that the number of materials in material_conductivities (user
    # input) matches the number imported from the Gmsh file
    if rank == 0:
        if nMaterials != len(material_conductivities)-1:
            PETSc.Sys.Print('  The number of materials in ' + mesh_file +
                            ' is not consistent with the ' +
                            'material conductivities array. Aborting.')
            exit(-1)

    # Build conductivity arrays
    conductivityModel = np.zeros(nElems, dtype=np.float64)
    for iEle in np.arange(nElems):
        conductivityModel[iEle] = material_conductivities[int(elemsS[iEle])]

    # Build PETSc structures
    vector = createSequentialVectorWithArray(conductivityModel)

    # Delete unnecessary arrays
    del conductivityModel

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'conductivityModel.dat'

    # Write PETGEM conductivity model in PETSc format
    writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    return
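
The per-element loop above is a plain lookup of each element's physical group in material_conductivities; with NumPy fancy indexing the same mapping can be written in one vectorized statement. A small standalone sketch (the group indices and conductivities below are invented):

# Standalone sketch of the element-to-conductivity mapping; values are
# illustrative only, not taken from a real model.
import numpy as np

material_conductivities = np.array([1.0, 3.3, 0.01])   # S/m, one per material
elemsS = np.array([0, 2, 1, 1, 2])                      # physical group per element
conductivityModel = material_conductivities[elemsS.astype(int)]
print(conductivityModel)   # one conductivity per element: 1.0, 0.01, 3.3, 3.3, 0.01
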
Example #3
def preprocessingDOF(mesh_file, out_dir, rank):
    ''' Preprocess degrees of freedom (DOFs) and their associated data
    structures of a given mesh in Gmsh format. Here, dofs are defined for
    edge finite element computations.

    :param str mesh_file: mesh file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: number of DOFs.
    :rtype: int
    '''

    if rank == 0:
        PETSc.Sys.Print('  Degrees of freedom (dofs.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingDOF(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Compute dofs
    dofs, dofsNodes = computeDofs(elemsN, nElems)
    nDofs = dofsNodes.shape[0]

    # Compute faces
    elemsF, facesN = computeFaces(elemsN, nElems)

    # Compute boundary faces
    boundaryFacesN = computeBoundaryFaces(elemsF, facesN)

    # Delete unnecessary arrays
    del elemsN
    del elemsF
    del facesN

    # Compute boundary dofs
    boundaryDofs = computeBoundaryDofs(dofsNodes, boundaryFacesN)

    # Delete unnecessary arrays
    del boundaryFacesN

    # ---------- DOFS ----------
    # Get matrix dimensions
    size = dofs.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], dofs)

    # Delete unnecessary arrays
    del dofs

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'dofs.dat'

    # Write PETGEM dofs in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------- DOFS TO NODES ----------
    if rank == 0:
        PETSc.Sys.Print('  Dofs connectivity (dofsNodes.dat)')

    # Get matrix dimensions
    size = dofsNodes.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], dofsNodes)

    # Delete unnecessary arrays
    del dofsNodes

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'dofsNodes.dat'

    # Write PETGEM dofsNodes in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------- BOUNDARY DOFs ----------
    if rank == 0:
        PETSc.Sys.Print('  Boundaries (boundaries.dat)')

    # Build PETSc structures
    vector = createSequentialVectorWithArray(boundaryDofs)

    # Delete unnecessary arrays
    del boundaryDofs

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'boundaries.dat'

    # Write PETGEM boundaries in PETSc format
    writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    return nDofs
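
The value returned by preprocessingDOF is the global number of DOFs, which is what a caller needs to size the linear system. A hedged caller sketch (the paths and the per-row preallocation value are placeholders, not part of the original code):

# Hypothetical caller; paths and nnz=50 are placeholders.
from petsc4py import PETSc

rank = PETSc.COMM_WORLD.getRank()
nDofs = preprocessingDOF('model/mesh.msh', 'out/', rank)
A = PETSc.Mat().createAIJ([nDofs, nDofs], nnz=50, comm=PETSc.COMM_WORLD)
b = PETSc.Vec().createMPI(nDofs, comm=PETSc.COMM_WORLD)
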
Example #4
def postProcessingFields(receivers, modelling, x, Iend_receivers,
                         Istart_receivers, edgeOrder, nodalOrder,
                         numDimensions, rank):
    ''' Compute the CSEM modelling output: primary electric field, secondary
    electric field and total electric field at the receiver positions.

    :param petsc matrix receivers: receiver data used to compute electric fields
    :param object modelling: CSEM modelling with physical parameters.
    :param petsc vector x: solution vector
    :param int Iend_receivers: end of the local receiver range
    :param int Istart_receivers: start of the local receiver range
    :param int edgeOrder: order of tetrahedral edge element
    :param int nodalOrder: order of tetrahedral nodal element
    :param int numDimensions: number of dimensions
    :param int rank: MPI rank
    :return: elapsedTimepostprocessing
    :rtype: float
    '''

    # Start timer
    Init_postprocessing = getTime()

    # Number of receivers
    nReceivers = receivers.getSize()[0]
    nReceiversLocal = Iend_receivers-Istart_receivers

    # Print number of receivers per MPI task
    PETSc.Sys.Print('  Number of receivers:', nReceivers)
    PETSc.Sys.syncPrint('    Rank: ', rank, ' is post-processing ',
                        nReceiversLocal, ' receivers')
    PETSc.Sys.syncFlush()

    # Read edge connectivity for receivers
    # Auxiliary arrays
    dataRecv = np.zeros(edgeOrder, dtype=np.float64)
    edgesIdxRecv = np.zeros((nReceiversLocal, edgeOrder), dtype=PETSc.IntType)
    idx = 0
    for iRecv in np.arange(Istart_receivers, Iend_receivers):
        # Get data of iRecv
        temp = np.asarray(receivers.getRow(iRecv))
        dataRecv[:] = np.real(temp[1, 19:25])
        # Edge-indexes for iRecv
        edgesIdxRecv[idx, :] = (dataRecv).astype(PETSc.IntType)
        idx += 1

    # Gather global solution of x to local vector
    # Sequential vector for gather tasks
    x_local = createSequentialVector(edgeOrder*nReceiversLocal,
                                     communicator=None)

    # Build Index set in PETSc format
    IS_edges = PETSc.IS().createGeneral(edgesIdxRecv.flatten(),
                                        comm=PETSc.COMM_WORLD)
    # Build gather vector
    gatherVector = PETSc.Scatter().create(x, IS_edges, x_local, None)
    # Gather values
    gatherVector.scatter(x, x_local, PETSc.InsertMode.INSERT_VALUES,
                         PETSc.ScatterMode.FORWARD)

    # Post-processing electric fields
    # Create parallel structures
    EpX = createParallelVector(nReceivers, communicator=None)
    EpY = createParallelVector(nReceivers, communicator=None)
    EpZ = createParallelVector(nReceivers, communicator=None)
    EsX = createParallelVector(nReceivers, communicator=None)
    EsY = createParallelVector(nReceivers, communicator=None)
    EsZ = createParallelVector(nReceivers, communicator=None)
    EtX = createParallelVector(nReceivers, communicator=None)
    EtY = createParallelVector(nReceivers, communicator=None)
    EtZ = createParallelVector(nReceivers, communicator=None)
    EpDense = createParallelDenseMatrix(nReceivers, numDimensions,
                                        communicator=None)
    EsDense = createParallelDenseMatrix(nReceivers, numDimensions,
                                        communicator=None)
    EtDense = createParallelDenseMatrix(nReceivers, numDimensions,
                                        communicator=None)

    # Reallocate auxiliary array (receiver coordinates, element coordinates
    # and nodal indexes)
    dataRecv = np.zeros(numDimensions+nodalOrder*numDimensions+nodalOrder,
                        dtype=np.float64)
    # Compute fields for all local receivers
    idx = 0
    for iRecv in np.arange(Istart_receivers, Iend_receivers):
        # Get data of iRecv
        temp = np.asarray(receivers.getRow(iRecv))
        dataRecv[:] = np.real(temp[1, 0:19])
        # Receivers coordinates
        coordReceiver = dataRecv[0:3]
        # Element coordinates
        coordElement = dataRecv[3:15]
        # Nodal-indexes
        nodesElement = (dataRecv[15:19]).astype(PETSc.IntType)
        # Local solution entries for iRecv
        x_recv = x_local[idx*edgeOrder:(idx+1)*edgeOrder]
        # Compute fields
        [EpRecv, EsRecv, EtRecv] = computeFieldsReceiver(modelling,
                                                         coordReceiver,
                                                         coordElement,
                                                         nodesElement,
                                                         x_recv,
                                                         edgeOrder,
                                                         numDimensions)
        idx += 1
        # Set primary field components
        EpX.setValue(iRecv, EpRecv[0], addv=PETSc.InsertMode.INSERT_VALUES)
        EpY.setValue(iRecv, EpRecv[1], addv=PETSc.InsertMode.INSERT_VALUES)
        EpZ.setValue(iRecv, EpRecv[2], addv=PETSc.InsertMode.INSERT_VALUES)
        EpDense.setValue(iRecv, 0, EpRecv[0],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EpDense.setValue(iRecv, 1, EpRecv[1],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EpDense.setValue(iRecv, 2, EpRecv[2],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        # Set secondary field components
        EsX.setValue(iRecv, EsRecv[0],
                     addv=PETSc.InsertMode.INSERT_VALUES)
        EsY.setValue(iRecv, EsRecv[1],
                     addv=PETSc.InsertMode.INSERT_VALUES)
        EsZ.setValue(iRecv, EsRecv[2],
                     addv=PETSc.InsertMode.INSERT_VALUES)
        EsDense.setValue(iRecv, 0, EsRecv[0],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EsDense.setValue(iRecv, 1, EsRecv[1],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EsDense.setValue(iRecv, 2, EsRecv[2],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        # Set total field components
        EtX.setValue(iRecv, EtRecv[0], addv=PETSc.InsertMode.INSERT_VALUES)
        EtY.setValue(iRecv, EtRecv[1], addv=PETSc.InsertMode.INSERT_VALUES)
        EtZ.setValue(iRecv, EtRecv[2], addv=PETSc.InsertMode.INSERT_VALUES)
        EtDense.setValue(iRecv, 0, EtRecv[0],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EtDense.setValue(iRecv, 1, EtRecv[1],
                         addv=PETSc.InsertMode.INSERT_VALUES)
        EtDense.setValue(iRecv, 2, EtRecv[2],
                         addv=PETSc.InsertMode.INSERT_VALUES)

    # Start global vector assembly
    EpX.assemblyBegin(), EpY.assemblyBegin(), EpZ.assemblyBegin()
    EsX.assemblyBegin(), EsY.assemblyBegin(), EsZ.assemblyBegin()
    EtX.assemblyBegin(), EtY.assemblyBegin(), EtZ.assemblyBegin()
    EpDense.assemblyBegin(), EsDense.assemblyBegin(), EtDense.assemblyBegin()
    # End global vector assembly
    EpX.assemblyEnd(), EpY.assemblyEnd(), EpZ.assemblyEnd()
    EsX.assemblyEnd(), EsY.assemblyEnd(), EsZ.assemblyEnd()
    EtX.assemblyEnd(), EtY.assemblyEnd(), EtZ.assemblyEnd()
    EpDense.assemblyEnd(), EsDense.assemblyEnd(), EtDense.assemblyEnd()

    # Verify if directory exists
    MASTER = 0
    if rank == MASTER:
        checkIfDirectoryExist(modelling['DIR_NAME'] + '/Output/Petsc')
        checkIfDirectoryExist(modelling['DIR_NAME'] + '/Output/Ascii')
        checkIfDirectoryExist(modelling['DIR_NAME'] + '/Output/Matlab')

    # Print
    PETSc.Sys.Print('  Saving output:')
    # Export electric fields (petsc format)
    printMessage('    Petsc format', rank)
    # Save primary electric field
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EpX.dat',
                     EpX, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EpY.dat',
                     EpY, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EpZ.dat',
                     EpZ, communicator=None)
    writeDenseMatrix(modelling['DIR_NAME'] + '/Output/Petsc/Ep.dat',
                     EpDense, communicator=None)
    # Save secondary electric field
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EsX.dat',
                     EsX, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EsY.dat',
                     EsY, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EsZ.dat',
                     EsZ, communicator=None)
    writeDenseMatrix(modelling['DIR_NAME'] + '/Output/Petsc/Es.dat',
                     EsDense, communicator=None)
    # Save total electric field
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EtX.dat',
                     EtX, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EtY.dat',
                     EtY, communicator=None)
    writePetscVector(modelling['DIR_NAME'] + '/Output/Petsc/EtZ.dat',
                     EtZ, communicator=None)
    writeDenseMatrix(modelling['DIR_NAME'] + '/Output/Petsc/Et.dat',
                     EtDense, communicator=None)

    # Export electric fields (Ascii and Matlab format)
    if rank == MASTER:
        # Export electric fields (Ascii format)
        # Save primary electric field
        printMessage('    Ascii format', rank)
        dataEp = exportPetscToAscii(nReceivers,
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EpX.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EpY.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EpZ.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Ascii/Ep.dat')
        # Save secondary electric field
        dataEs = exportPetscToAscii(nReceivers,
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EsX.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EsY.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EsZ.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Ascii/Es.dat')
        # Save total electric field
        dataEt = exportPetscToAscii(nReceivers,
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EtX.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EtY.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Petsc/EtZ.dat',
                                    modelling['DIR_NAME'] +
                                    '/Output/Ascii/Et.dat')
        # Export electric fields (Matlab format)
        printMessage('    Matlab format', rank)
        # Save primary electric field
        exportNumpytoMatlab(dataEp, modelling['DIR_NAME'] +
                            '/Output/Matlab/Ep.mat', electricField='Primary')
        # Save secondary electric field
        exportNumpytoMatlab(dataEs, modelling['DIR_NAME'] +
                            '/Output/Matlab/Es.mat', electricField='Secondary')
        # Save total electric field
        exportNumpytoMatlab(dataEt, modelling['DIR_NAME'] +
                            '/Output/Matlab/Et.mat', electricField='Total')
        # Remove temporary files (petsc)
        filesToDelete = [modelling['DIR_NAME'] + '/Output/Petsc/EpX.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EpY.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EpZ.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsX.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsY.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsZ.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtX.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtY.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtZ.dat',
                         modelling['DIR_NAME'] + '/Output/Petsc/EpX.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EpY.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EpZ.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsX.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsY.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EsZ.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtX.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtY.dat.info',
                         modelling['DIR_NAME'] + '/Output/Petsc/EtZ.dat.info']

        for fileToDelete in filesToDelete:
            removeFile(fileToDelete)

    # End timer
    End_postprocessing = getTime()

    # Elapsed time in post-processing
    elapsedTimepostprocessing = End_postprocessing-Init_postprocessing

    return elapsedTimepostprocessing
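
The key parallel step in postProcessingFields is gathering selected entries of the distributed solution vector x into a sequential local vector through an index set (IS) and a Scatter. A minimal standalone sketch of that pattern, with invented sizes and indices:

# Minimal sketch of the IS + Scatter gather pattern used above; sizes and
# indices are illustrative only.
import numpy as np
from petsc4py import PETSc

comm = PETSc.COMM_WORLD
x = PETSc.Vec().createMPI(10, comm=comm)
rstart, rend = x.getOwnershipRange()
x.setValues(np.arange(rstart, rend, dtype=PETSc.IntType),
            np.arange(rstart, rend, dtype=PETSc.ScalarType))
x.assemblyBegin(), x.assemblyEnd()

wanted = np.array([0, 2, 4], dtype=PETSc.IntType)    # global indices to gather
x_local = PETSc.Vec().createSeq(len(wanted))
IS_wanted = PETSc.IS().createGeneral(wanted, comm=comm)
gather = PETSc.Scatter().create(x, IS_wanted, x_local, None)
gather.scatter(x, x_local, PETSc.InsertMode.INSERT_VALUES,
               PETSc.ScatterMode.FORWARD)
print(x_local.getArray())                            # [0. 2. 4.] on each rank
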
Example #5
def preprocessingNNZ(nedelec_order, mesh_file, out_dir, rank):
    ''' Preprocess sparsity pattern (NNZ) for parallel matrix allocation
    of a given mesh in Gmsh format.

    Since PETGEM parallelism is based on PETSc, computing the matrix
    sparsity pattern is critical for performance. Furthermore, PETGEM
    is based on tetrahedral edge finite elements of first, second and third
    order, which produce:

        * 6 DOFs per element in first order discretizations
        * 20 DOFs per element in second order discretizations
        * 45 DOFs per element in third order discretizations

    Hence, the tetrahedral valence is equal to:

        * 34 in first order discretizations
        * 134 in second order discretizations
        * 363 in third order discretizations

    :param int nedelec_order: nedelec element order.
    :param str mesh_file: mesh file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: None
    '''

    if rank == 0:
        PETSc.Sys.Print('  Sparsity pattern (nnz.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingNNZ(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Compute number of edges
    _, edgesNodes = computeEdges(elemsN, nElems, nedelec_order)
    nEdges = edgesNodes.shape[0]

    # Compute number of faces
    elemsF, facesN = computeFaces(elemsN, nElems, nedelec_order)
    nFaces = facesN.shape[0]

    if nedelec_order == 1:  # First order edge element
        # Number of DOFs corresponds to the number of edges in the mesh
        nDofs = nEdges
        # In order to avoid memory performance issues, add 20% to the valence
        valence = 41
    elif nedelec_order == 2:  # Second order edge element
        # Number of DOFs
        nDofs = nEdges * 2 + nFaces * 2
        # In order to avoid memory performance issues, add 20% to the valence
        valence = 161
    elif nedelec_order == 3:  # Third order edge element
        # Number of DOFs
        nDofs = nEdges * 3 + nFaces * 6 + nElems * 3
        # In order to avoid memory performance issues, add 20% to the valence
        valence = 436
    else:
        raise ValueError('Edge element order ' + str(nedelec_order) +
                         ' not supported.')

    # Build nnz pattern for each row
    nnz = np.full(nDofs, valence, dtype=np.int64)

    # Build PETSc structures
    vector = createSequentialVectorWithArray(nnz)

    # Delete unnecessary arrays
    del nnz

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'nnz.dat'

    # Write PETGEM nnz vector in PETSc format
    writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    return
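
The docstring above fixes both the DOF count and the row valence per element order, and the code then pads the valence by 20% before filling the NNZ vector. A standalone sketch of that bookkeeping (the mesh sizes in the example call are invented):

# Standalone sketch of the DOF-count / padded-valence bookkeeping; the
# example mesh sizes are invented.
import math

def nnzParameters(nedelec_order, nEdges, nFaces, nElems):
    if nedelec_order == 1:
        nDofs, valence = nEdges, 34
    elif nedelec_order == 2:
        nDofs, valence = 2*nEdges + 2*nFaces, 134
    elif nedelec_order == 3:
        nDofs, valence = 3*nEdges + 6*nFaces + 3*nElems, 363
    else:
        raise ValueError('Edge element order ' + str(nedelec_order) +
                         ' not supported.')
    # 20% headroom per row reproduces the values used above (41, 161, 436)
    return nDofs, math.ceil(1.2*valence)

print(nnzParameters(2, nEdges=120000, nFaces=160000, nElems=80000))
# -> (560000, 161)
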
Example #6
def preprocessingEdges(nedelec_order, mesh_file, out_dir, rank):
    ''' Preprocess edges, edge boundaries and their associated data structures
    for a given mesh in Gmsh format. For first-order edge finite elements
    the edges are the dofs. For second-order edge finite elements the dofs
    are computed at runtime based on the edges and faces of each tetrahedral
    element.

    :param int nedelec_order: nedelec element order.
    :param str mesh_file: mesh file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: number of edges and number of DOFs.
    :rtype: int, int
    '''

    # ---------- Export Edges ----------
    if rank == 0:
        PETSc.Sys.Print('  Edges (edges.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingEdges(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Compute edges
    elemsE, edgesNodes = computeEdges(elemsN, nElems, nedelec_order)
    nEdges = edgesNodes.shape[0]

    # Compute boundaries
    boundaries, nDofs = computeBoundaries(elemsN, nElems, edgesNodes,
                                          nedelec_order)

    # ---------- Export Edges ----------
    # Get matrix dimensions
    size = elemsE.shape
    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], elemsE)

    # Delete unnecessary arrays
    del elemsE

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'edges.dat'

    # Write PETGEM edges in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------- Export Edges to nodes ----------
    if rank == 0:
        PETSc.Sys.Print('  Edges connectivity (edgesNodes.dat)')

    # Get matrix dimensions
    size = edgesNodes.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], edgesNodes)

    # Delete unnecessary arrays
    del edgesNodes

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'edgesNodes.dat'

    # Write PETGEM edgesNodes in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------- Export boundaries ----------
    if rank == 0:
        PETSc.Sys.Print('  Boundaries (boundaries.dat)')

    # Build PETSc structures
    vector = createSequentialVectorWithArray(boundaries)

    # Delete unnecessary arrays
    del boundaries

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'boundaries.dat'

    # Write PETGEM boundaries in PETSc format
    writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    return nEdges, nDofs
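
preprocessingEdges and preprocessingNNZ read the same mesh and are naturally run back to back from one driver. A hypothetical end-of-preprocessing sketch (mesh and output paths are placeholders, not part of the original code):

# Hypothetical driver combining the two routines above; paths are placeholders.
from petsc4py import PETSc

rank = PETSc.COMM_WORLD.getRank()
nedelec_order = 2
nEdges, nDofs = preprocessingEdges(nedelec_order, 'model/mesh.msh', 'out/', rank)
preprocessingNNZ(nedelec_order, 'model/mesh.msh', 'out/', rank)
if rank == 0:
    PETSc.Sys.Print('  Preprocessing done:', nEdges, 'edges,', nDofs, 'dofs')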