def parseElements(self, fpExp, nNewElements):
    ''' Parse elements from an Argus export file. '''
    # Loop through lines
    elementsRead = 0
    elementIndex = len(self.elIndex)
    for line in fpExp:
        if line.isspace():  # Skip blank lines
            continue
        pieces = line.split()
        if pieces[0] == 'E':
            # Read indices with node increment if needed
            self.elements.append([int(pieces[i + 2]) for i in range(3)])
            self.elementValues.append([int(pieces[i + 5])
                                       for i in range(self.nEVal)])
            self.elIndex.append(elementIndex)
            self.elOnBoundary.append(bool(int(pieces[5])))
            elementsRead += 1
            elementIndex += 1
            if elementsRead == nNewElements:
                # print(f'Read {elementsRead} elements')
                self.nElements += elementsRead
                self.elements = np.array(self.elements)
                return elementsRead
    # Fell through the loop before reading the expected number of elements
    if elementsRead != nNewElements:
        myerror(f'Expected {nNewElements} elements but found {elementsRead}')
def getCheckPointVars(checkFile, varNames, Q, t=None):
    """ Read variables from a firedrake checkpoint file

    Parameters
    ----------
    checkFile : str
        checkfile name sans .h5
    varNames : str or list of str
        Names of variables to extract
    Q : firedrake function space
        firedrake function space; can be a vector space, V, but not mixed
    t : float, optional
        time step to load (integer years only), by default None

    Returns
    -------
    myVars : dict
        extracted firedrake functions keyed by variable name
    """
    # Ensure a list since a single str is allowed
    if type(varNames) is not list:
        varNames = [varNames]
    # open checkpoint
    myVars = {}
    if not os.path.exists(f'{checkFile}.h5'):
        myerror(f'getCheckPointVar: file {checkFile}.h5 does not exist')
    # Open file and read variables
    with firedrake.DumbCheckpoint(checkFile, mode=firedrake.FILE_READ) as chk:
        if t is not None:
            # note this only works for integer years
            tt, ii = chk.get_timesteps()
            # print(tt[-1], ii[-1], len(tt))
            chk.set_timestep(t, idx=int(t - 1))
        for varName in varNames:
            myVar = firedrake.Function(Q, name=varName)
            chk.load(myVar, name=varName)
            myVars[varName] = myVar
    return myVars
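# Minimal usage sketch for getCheckPointVars (not part of the library). The
# checkpoint name 'inversionResult' and the variable names 'betaInv' and
# 'AInv' are hypothetical placeholders.
def _exampleGetCheckPointVars(Q):
    ''' Load two hypothetical scalar variables from inversionResult.h5. '''
    myVars = getCheckPointVars('inversionResult', ['betaInv', 'AInv'], Q)
    return myVars['betaInv'], myVars['AInv']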
def parseNodes(self, fpExp, nNewNodes):
    ''' Parse nodal values from an Argus exp file. '''
    nodesRead = 0
    nodeIndex = len(self.nodeIndex)  # With zero filled, will start with 1
    for line in fpExp:
        if line.isspace():  # Skip blank lines
            continue
        pieces = line.split()
        if pieces[0] == 'N':
            # Append coordinates for a point
            self.nodes.append((float(pieces[2]), float(pieces[3])))
            # Parse node data values
            self.nodeOnBoundary.append(bool(int(pieces[4])))
            self.shelfFrontNode.append(bool(int(pieces[6])))
            self.nodeValues.append([float(pieces[4 + i])
                                    for i in range(self.nNVal)])
            self.nodeIndex.append(nodeIndex)  # Increment node index and append
            # I need to add a type field for multiple domain boundaries
            self.nodeType.append(1)
            nodeIndex += 1
            nodesRead += 1  # Increment node counter
            if nodesRead == nNewNodes:
                # print(f'Read {nodesRead} nodes')
                # print(f'Node on boundary {np.sum(self.nodeOnBoundary)}')
                self.nNodes += nodesRead
                self.nodes = np.array(self.nodes)
                self.nodeOnBoundary = np.array(self.nodeOnBoundary)
                return nodesRead
    if nodesRead != nNewNodes:
        myerror(f'Expected {nNewNodes} nodes but only read {nodesRead}')
def setupMesh(meshFile, degree=2, meshOversample=None, savegmsh=False):
    """ Read argus mesh file and return mesh along with function spaces

    Parameters
    ----------
    meshFile : str
        argus meshfile name
    degree : int, optional
        degree of function spaces, by default 2
    meshOversample : int, optional
        refine the mesh (meshOversample - 1) times with a mesh hierarchy,
        by default None (no refinement)
    savegmsh : bool, optional
        save the gmsh file produced during conversion, by default False

    Returns
    -------
    mesh
        firedrake mesh
    Q, V
        firedrake scalar and vector function spaces
    opts
        additional options returned by argusToFiredrakeMesh
    """
    # Input the mesh
    maxOversample = 4  # Arbitrary, could be increased
    mesh, opts = argusToFiredrakeMesh(meshFile, savegmsh=savegmsh)
    if meshOversample is not None:
        numLevels = meshOversample - 1
        if numLevels < 0 or numLevels > (maxOversample - 1):
            myerror(f'meshOversample={meshOversample} but require '
                    f'1 <= meshOversample <= {maxOversample}')
        mesh = firedrake.MeshHierarchy(mesh, numLevels)[numLevels]
    # Create scalar and vector function spaces
    Q = firedrake.FunctionSpace(mesh, family='CG', degree=degree)
    V = firedrake.VectorFunctionSpace(mesh, family='CG', degree=degree)
    return mesh, Q, V, opts
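# Minimal usage sketch for setupMesh (not part of the library). The mesh
# file name 'myDomain.exp' is a hypothetical placeholder.
def _exampleSetupMesh():
    ''' Build a once-refined mesh and its function spaces. '''
    mesh, Q, V, opts = setupMesh('myDomain.exp', degree=2, meshOversample=2)
    return mesh, Q, V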
def gmshHeader(self, gmshFile):
    ''' Open gmsh file and write header. '''
    try:
        fpGmsh = open(gmshFile, 'w')  # open file
    except Exception:
        myerror(f'error opening gmsh output file {gmshFile}')
    # Header info: MSH format 2.2, ASCII (0), 8-byte floating point data
    print('$MeshFormat', file=fpGmsh)
    print('2.2 0 8', file=fpGmsh)
    print('$EndMeshFormat', file=fpGmsh)
    return fpGmsh
def inputMeltParams(meltParams):
    """Read parameters for melt models

    Parameters
    ----------
    meltParams : str
        yaml file with melt params for various models

    Returns
    -------
    meltParams : dict
        melt parameters read from the yaml file
    """
    if not os.path.exists(meltParams):
        myerror(f'inputMeltParams: meltParams file ({meltParams}) not found.')
    with open(meltParams, 'r') as fp:
        meltParams = yaml.load(fp, Loader=yaml.FullLoader)
    return meltParams
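# Minimal usage sketch for inputMeltParams (not part of the library). The
# file name 'meltParams.yaml' and the model key 'linear' are hypothetical
# placeholders for whatever the yaml file actually contains.
def _exampleInputMeltParams():
    ''' Read melt parameters and pull out those for one model. '''
    meltParams = inputMeltParams('meltParams.yaml')
    return meltParams['linear']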
def readExp(self, expFile):
    ''' Read Argus export (.exp) file. '''
    try:  # open file
        fpExp = open(expFile, 'r')
    except Exception:
        myerror(f'argusMesh.readExp: could not open meshfile {expFile} '
                'for read')
    #
    nNewNodes, nNewElements = self.parseHeader(fpExp)
    nodesRead = self.parseNodes(fpExp, nNewNodes)
    elementsRead = self.parseElements(fpExp, nNewElements)
    # print(nodesRead, self.nNodes, elementsRead, self.nElements)
    fpExp.close()
    return
def parseHeader(self, fpExp):
    ''' Parse header info from Argus exp file. '''
    headerPieces = fpExp.readline().split()
    if len(headerPieces) != 4:
        myerror('header not found')
    # Parse header
    nNewNodes = int(headerPieces[1])  # Number of nodes
    nNewElements = int(headerPieces[0])  # Number of elements
    self.nNVal = int(headerPieces[3])  # Number of cols of node data
    self.nEVal = int(headerPieces[2])  # Number of cols of el data
    self.nodeValues = [[0] * self.nNVal]  # Zero unused index 0 node vals
    self.elementValues = [[0] * self.nEVal]  # Zero unused index 0 el vals
    print(f'Header info {nNewNodes} {nNewElements} '
          f'{self.nNVal} {self.nEVal}')
    return nNewNodes, nNewElements
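# Sketch of the Argus .exp layout as inferred from parseHeader, parseNodes,
# and parseElements above (field names are descriptive, not from a file spec):
#
#   nElements nNodes nEVal nNVal    <- header line
#   N id x y v1 ... vN              <- node lines; v1 = on-boundary flag,
#                                      v3 = shelf-front flag
#   E id n1 n2 n3 w1 ... wE         <- element lines; n1-n3 = node indices,
#                                      w1 = on-boundary flag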
def getModelVarFromTiff(myTiff, Q):
    """Read a model variable from a tiff file using rasterio

    Parameters
    ----------
    myTiff : str
        tiff file with a scalar variable
    Q : firedrake function space
        function space

    Returns
    -------
    firedrake function
        Data from tiff
    """
    if not os.path.exists(myTiff):
        myerror(f'Geometry file {myTiff} does not exist')
    x = rasterio.open(myTiff)
    return icepack.interpolate(x, Q)
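# Minimal usage sketch for getModelVarFromTiff (not part of the library).
# The file name 'bed.tif' is a hypothetical placeholder.
def _exampleGetModelVarFromTiff(Q):
    ''' Interpolate a bed-elevation geotiff onto the function space Q. '''
    return getModelVarFromTiff('bed.tif', Q)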
def getModelGeometry(geometryFile, Q, smooth=False, alpha=2e3, zFirn=0.,
                     rhoI=rhoI, rhoW=rhoW):
    """Load geometry data for model and create firedrake interpolators

    Parameters
    ----------
    geometryFile : str
        Path to a yaml file with bed, surface, thickness, and floatMask
    Q : firedrake function space
        function space
    smooth : bool, optional
        apply firedrakeSmooth to the result, by default False
    alpha : float, optional
        parameter that controls the amount of smoothing, which is
        approximately the smoothing lengthscale in m, by default 2e3
    zFirn : float, optional
        Correct elevation for firn thickness (m), by default 0
    rhoI : float, optional
        ice density, by default rhoI
    rhoW : float, optional
        sea water density, by default rhoW

    Returns
    -------
    zb : firedrake interp function
        bed elevation (m)
    s : firedrake interp function
        surface elevation (m)
    h : firedrake interp function
        ice thickness (m)
    floatMask : firedrake interp function
        mask with 1 for floating and 0 for grounded ice
    g : firedrake interp function
        mask with 1 for grounded and 0 for floating ice
    """
    # Load geometry file
    try:
        with open(geometryFile) as fp:
            geom = yaml.load(fp, Loader=yaml.FullLoader)
    except Exception:
        myerror(f'Could not open geometry file: {geometryFile}')
    # Load and convert to firedrake
    fd = {'bed': None, 'surface': None, 'thickness': None, 'floatMask': None}
    # Read and process data
    for myVar in geom:
        print(myVar, geom[myVar])
        fd[myVar] = getModelVarFromTiff(geom[myVar], Q)
        if smooth and alpha > 1 and myVar != 'floatMask':
            fd[myVar] = firedrakeSmooth(fd[myVar], alpha=alpha)
        if myVar == 'surface':
            fd[myVar] = icepack.interpolate(fd[myVar] - zFirn, Q)
    # If data are smoothed, regenerate a new mask from the smoothed results.
    if smooth and alpha > 1:
        zF = flotationHeight(fd['bed'], Q, rhoI=rhoI, rhoW=rhoW)
        fd['floatMask'], g = flotationMask(fd['surface'], zF, Q, rhoI=rhoI,
                                           rhoW=rhoW)
    else:
        g = icepack.interpolate(fd['floatMask'] < 1, Q)
    # Don't allow surface or thickness values below 10 m
    for myVar in ['surface', 'thickness']:
        fd[myVar] = icepack.interpolate(firedrake.max_value(10, fd[myVar]), Q)
    for myVar in geom:
        print(f'{myVar} min/max {fd[myVar].dat.data_ro.min():10.2f} '
              f'{fd[myVar].dat.data_ro.max():10.2f}')
    return fd['bed'], fd['surface'], fd['thickness'], fd['floatMask'], g
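# Minimal usage sketch for getModelGeometry (not part of the library). The
# yaml and geotiff file names are hypothetical placeholders; the yaml is
# assumed to map each variable name to a geotiff path, e.g.
#   bed: bed.tif
#   surface: surface.tif
#   thickness: thickness.tif
#   floatMask: floatMask.tif
def _exampleGetModelGeometry(Q):
    ''' Load smoothed model geometry from geometry.yaml. '''
    zb, s, h, floatMask, grounded = getModelGeometry('geometry.yaml', Q,
                                                     smooth=True, alpha=2e3)
    return zb, s, h, floatMask, grounded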