Example #1
def preprocessingNNZ(mesh_file, out_dir, rank):
    ''' Preprocess sparsity pattern (NNZ) for parallel matrix allocation
    of a given mesh in Gmsh format. Here, dofs are defined for edge
    finite element computations.

    :param str mesh_file: mesh file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: None
    '''

    if rank == 0:
        PETSc.Sys.Print('  Sparsity pattern (nnz.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingNNZ(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Compute dofs
    _, dofsNodes = computeDofs(elemsN, nElems)
    nDofs = dofsNodes.shape[0]

    # Since PETGEM parallelism is based on PETSc, computing the matrix
    # sparsity pattern is critical for performance. Furthermore, PETGEM
    # V1.0 is based on linear edge finite elements, which produce six dofs
    # per tetrahedral element. Hence, the tetrahedral valence is equal
    # to 34. Based on this information we build the NNZ vector.

    # To avoid memory performance issues, pad the valence by roughly 40%
    valence = 50
    nnz = np.full(nDofs, valence, dtype=int)

    # Build PETSc structures
    vector = createSequentialVectorWithArray(nnz)

    # Delete unnecessary arrays
    del nnz

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'nnz.dat'

    # Write PETGEM nnz vector in PETSc format
    writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    return
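The nnz vector written above exists to drive PETSc matrix preallocation. A minimal sketch of how it might be consumed downstream, assuming petsc4py and that 'out/nnz.dat' is the file produced by preprocessingNNZ (the path is hypothetical):

from petsc4py import PETSc

# Load the per-row NNZ estimates written by preprocessingNNZ
viewer = PETSc.Viewer().createBinary('out/nnz.dat', 'r',
                                     comm=PETSc.COMM_SELF)
nnzVec = PETSc.Vec().load(viewer)

# Preallocate the sparse system matrix with those estimates
nnz = nnzVec.getArray().astype(PETSc.IntType)
nDofs = nnz.shape[0]
A = PETSc.Mat().createAIJ((nDofs, nDofs), nnz=nnz, comm=PETSc.COMM_SELF)
A.setUp()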
Example #2
def preprocessingConductivityModel(mesh_file, material_conductivities,
                                   out_dir, rank):
    ''' Preprocess the conductivity model associated with a given mesh in
    Gmsh format. Here, dofs are defined for edge finite element computations.

    :param str mesh_file: mesh file name to be preprocessed.
    :param ndarray material_conductivities: conductivity values
                                            for each material in the mesh.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: None
    '''

    if rank == 0:
        PETSc.Sys.Print('  Conductivity model (conductivityModel.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingConductivityModel(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsS, nElems = readGmshPhysicalGroups(mesh_file)

    # Number of materials
    nMaterials = elemsS.max()

    # Ensure that material_conductivities (user input) is consistent with
    # the physical groups imported from the Gmsh file
    if rank == 0:
        if nMaterials != len(material_conductivities) - 1:
            PETSc.Sys.Print('  The number of materials in ' + mesh_file +
                            ' is not consistent with the ' +
                            'material conductivities array. Aborting.')
            exit(-1)

    # Build conductivity arrays
    conductivityModel = np.zeros(nElems, dtype=np.float64)
    for iEle in np.arange(nElems):
        conductivityModel[iEle] = material_conductivities[int(elemsS[iEle])]

    # Build PETSc structures
    vector = createSequentialVectorWithArray(conductivityModel)

    # Delete unnecessary arrays
    del conductivityModel

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'conductivityModel.dat'

    # Write PETGEM conductivity model in PETSc format
    writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    return
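The per-element loop in preprocessingConductivityModel is a gather operation, so it can also be written as a single NumPy fancy-indexing expression. A small self-contained illustration (the material ids and conductivity values below are made up):

import numpy as np

# Hypothetical physical-group id per element, as read from Gmsh
elemsS = np.array([0, 1, 1, 2, 0])
# Hypothetical conductivity (S/m) for each material
material_conductivities = np.array([1.0, 0.01, 3.3])

# One-shot equivalent of the per-element loop above
conductivityModel = material_conductivities[elemsS.astype(int)]
print(conductivityModel)  # [1.   0.01 0.01 3.3  1.  ]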
Example #3
def preprocessingNNZ(nedelec_order, mesh_file, out_dir, rank):
    ''' Preprocess sparsity pattern (NNZ) for parallel matrix allocation
    of a given mesh in Gmsh format.

    Since PETGEM parallelism is based on PETSc, computing the matrix
    sparsity pattern is critical for performance. Furthermore, PETGEM
    is based on tetrahedral edge finite elements of first, second and third
    order, which produce:

        * 6 DOFs per element in first order discretizations
        * 20 DOFs per element in second order discretizations
        * 45 DOFs per element in third order discretizations

    Hence, the tetrahedral valence is equal to:

        * 34 in first order discretizations
        * 134 in second order discretizations
        * 363 in third order discretizations

    :param int nedelec_order: Nédélec element order.
    :param str mesh_file: mesh file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: None
    '''

    if rank == 0:
        PETSc.Sys.Print('  Sparsity pattern (nnz.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingNNZ(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Compute number of edges
    _, edgesNodes = computeEdges(elemsN, nElems, nedelec_order)
    nEdges = edgesNodes.shape[0]

    # Compute number of faces
    elemsF, facesN = computeFaces(elemsN, nElems, nedelec_order)
    nFaces = facesN.shape[0]

    if nedelec_order == 1:  # First order edge element
        # Number of DOFs corresponds to the number of edges in the mesh
        nDofs = nEdges
        # To avoid memory performance issues, pad the valence (34) by 20%
        valence = 41
    elif nedelec_order == 2:  # Second order edge element
        # Number of DOFs
        nDofs = nEdges * 2 + nFaces * 2
        # To avoid memory performance issues, pad the valence (134) by 20%
        valence = 161
    elif nedelec_order == 3:  # Third order edge element
        # Number of DOFs
        nDofs = nEdges * 3 + nFaces * 6 + nElems * 3
        # To avoid memory performance issues, pad the valence (363) by 20%
        valence = 436
    else:
        raise ValueError('Edge element order=' + str(nedelec_order) +
                         ' not supported.')

    # Build nnz pattern for each row
    nnz = np.full(nDofs, valence, dtype=int)

    # Build PETSc structures
    vector = createSequentialVectorWithArray(nnz)

    # Delete unnecessary arrays
    del nnz

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'nnz.dat'

    # Write PETGEM nnz vector in PETSc format
    writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    return
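The branching above encodes the DOF counts and valences listed in the docstring. The same bookkeeping can be factored into a small standalone helper; this is an illustrative sketch, not PETGEM API (dofsAndValence is a hypothetical name):

import math

def dofsAndValence(nedelec_order, nEdges, nFaces, nElems):
    # DOF formulas and base tetrahedral valences from the docstring:
    # order 1 -> valence 34, order 2 -> 134, order 3 -> 363
    if nedelec_order == 1:
        nDofs, base = nEdges, 34
    elif nedelec_order == 2:
        nDofs, base = 2 * nEdges + 2 * nFaces, 134
    elif nedelec_order == 3:
        nDofs, base = 3 * nEdges + 6 * nFaces + 3 * nElems, 363
    else:
        raise ValueError('Edge element order=' + str(nedelec_order) +
                         ' not supported.')
    # Pad the valence by 20% to avoid memory performance issues
    return nDofs, math.ceil(1.2 * base)

# Reproduces the constants used above: 34 -> 41, 134 -> 161, 363 -> 436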
Example #4
def preprocessingDOF(mesh_file, out_dir, rank):
    ''' Preprocess degrees of freedom (DOF) and their associated data
    structures of a given mesh in Gmsh format. Here, dofs are defined for
    edge finite element computations.

    :param str mesh_file: mesh file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: number of DOFs.
    :rtype: int
    '''

    if rank == 0:
        PETSc.Sys.Print('  Degrees of freedom (dofs.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingDOF(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Compute dofs
    dofs, dofsNodes = computeDofs(elemsN, nElems)
    nDofs = dofsNodes.shape[0]

    # Compute faces
    elemsF, facesN = computeFaces(elemsN, nElems)

    # Compute boundary faces
    boundaryFacesN = computeBoundaryFaces(elemsF, facesN)

    # Delete unnecessary arrays
    del elemsN
    del elemsF
    del facesN

    # Compute boundary dofs
    boundaryDofs = computeBoundaryDofs(dofsNodes, boundaryFacesN)

    # Delete unnecessary arrays
    del boundaryFacesN

    # ---------- DOFS ----------
    # Get matrix dimensions
    size = dofs.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], dofs)

    # Delete unnecessary arrays
    del dofs

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'dofs.dat'

    # Write PETGEM dofs in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------- DOFS TO NODES ----------
    if rank == 0:
        PETSc.Sys.Print('  Dofs connectivity (dofsNodes.dat)')

    # Get matrix dimensions
    size = dofsNodes.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], dofsNodes)

    # Delete unnecessary arrays
    del dofsNodes

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'dofsNodes.dat'

    # Write PETGEM dofsNodes in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------- BOUNDARY DOFs ----------
    if rank == 0:
        PETSc.Sys.Print('  Boundaries (boundaries.dat)')

    # Build PETSc structures
    vector = createSequentialVectorWithArray(boundaryDofs)

    # Delete unnecessary arrays
    del boundaryDofs

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'boundaries.dat'

    # Write PETGEM boundary dofs in PETSc format
    writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    return nDofs
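The three files written by preprocessingDOF can be read back with the standard PETSc binary viewer. A sketch under the assumption that 'out/' was the out_dir used above:

from petsc4py import PETSc

# Reload the dofs matrix (written as a dense matrix)
viewer = PETSc.Viewer().createBinary('out/dofs.dat', 'r',
                                     comm=PETSc.COMM_SELF)
dofsMat = PETSc.Mat().create(comm=PETSc.COMM_SELF)
dofsMat.setType(PETSc.Mat.Type.DENSE)
dofsMat.load(viewer)

# Reload the boundary dofs vector
viewer = PETSc.Viewer().createBinary('out/boundaries.dat', 'r',
                                     comm=PETSc.COMM_SELF)
boundaryDofs = PETSc.Vec().load(viewer)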
Example #5
def preprocessingEdges(nedelec_order, mesh_file, out_dir, rank):
    ''' Preprocess edges, edge boundaries and their associated data
    structures of a given mesh in Gmsh format. For edge finite elements
    of first order the edges are the dofs. For edge finite elements of
    second order the dofs are computed at runtime based on the edges and
    faces of each tetrahedral element.

    :param int nedelec_order: Nédélec element order.
    :param str mesh_file: mesh file name to be preprocessed.
    :param str out_dir: path for output.
    :param int rank: MPI rank.
    :return: number of edges and number of DOFs.
    :rtype: tuple
    '''

    # ---------- Export Edges ----------
    if rank == 0:
        PETSc.Sys.Print('  Edges (edges.dat)')

    # Check if mesh_file exists
    success = checkFilePath(mesh_file)

    if rank == 0:
        if not success:
            msg = ('  preprocessingEdges(): file ' + mesh_file +
                   ' does not exist.')
            raise ValueError(msg)

    # Read connectivity
    elemsN, nElems = readGmshConnectivity(mesh_file)

    # Compute edges
    elemsE, edgesNodes = computeEdges(elemsN, nElems, nedelec_order)
    nEdges = edgesNodes.shape[0]

    # Compute boundaries
    boundaries, nDofs = computeBoundaries(elemsN, nElems, edgesNodes,
                                          nedelec_order)

    # ---------- Export Edges ----------
    # Get matrix dimensions
    size = elemsE.shape
    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], elemsE)

    # Delete unnecessary arrays
    del elemsE

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'edges.dat'

    # Write PETGEM edges in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------- Export Edges to nodes ----------
    if rank == 0:
        PETSc.Sys.Print('  Edges connectivity (edgesNodes.dat)')

    # Get matrix dimensions
    size = edgesNodes.shape

    # Build PETSc structures
    matrix = createSequentialDenseMatrixWithArray(size[0], size[1], edgesNodes)

    # Delete unnecessary arrays
    del edgesNodes

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'edgesNodes.dat'

    # Write PETGEM edgesNodes in PETSc format
    writeParallelDenseMatrix(out_path, matrix, communicator=PETSc.COMM_SELF)

    # ---------- Export boundaries ----------
    if rank == 0:
        PETSc.Sys.Print('  Boundaries (boundaries.dat)')

    # Build PETSc structures
    vector = createSequentialVectorWithArray(boundaries)

    # Delete unnecessary arrays
    del boundaries

    # Verify if OUT_DIR exists
    checkIfDirectoryExist(out_dir)

    # Build path to save the file
    out_path = out_dir + 'boundaries.dat'

    # Write PETGEM boundaries in PETSc format
    writePetscVector(out_path, vector, communicator=PETSc.COMM_SELF)

    return nEdges, nDofs
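Taken together, these routines form the preprocessing stage of a PETGEM run. A minimal driver sketch calling the functions from Examples #3 and #5 (the mesh path and output directory are hypothetical):

from petsc4py import PETSc

# MPI rank from PETSc's world communicator
rank = PETSc.COMM_WORLD.getRank()

nedelec_order = 1
mesh_file = 'data/model.msh'  # hypothetical input mesh
out_dir = 'out/'              # trailing separator matters: output paths
                              # are built by string concatenation

preprocessingNNZ(nedelec_order, mesh_file, out_dir, rank)
nEdges, nDofs = preprocessingEdges(nedelec_order, mesh_file, out_dir, rank)

if rank == 0:
    PETSc.Sys.Print('  Preprocessing done: %d edges, %d dofs'
                    % (nEdges, nDofs))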