def getCheckPointVars(checkFile, varNames, Q, t=None):
    """Read variables from a firedrake checkpoint file.

    Parameters
    ----------
    checkFile : str
        checkfile name sans .h5
    varNames : str or list of str
        Names of variables to extract
    Q : firedrake function space
        firedrake function space can be a vector space, V, but not mixed
    t : float, optional
        Timestep to load; loads the default state when None.

    Returns
    -------
    myVars : dict
        {varName: firedrake.Function} for each requested variable
    """
    # Ensure a list since a single str is allowed
    # (isinstance instead of type(...) is not list).
    if not isinstance(varNames, list):
        varNames = [varNames]
    myVars = {}
    if not os.path.exists(f'{checkFile}.h5'):
        myerror(f'getCheckPointVar: file {checkFile}.h5 does not exist')
    # Open checkpoint and load each variable onto the function space.
    # (Removed a leftover debug print of t.)
    with firedrake.DumbCheckpoint(checkFile, mode=firedrake.FILE_READ) as chk:
        if t is not None:
            chk.set_timestep(t)
        for varName in varNames:
            myVar = firedrake.Function(Q, name=varName)
            chk.load(myVar, name=varName)
            myVars[varName] = myVar
    return myVars
def readLatlon(self, latlon=None, datFile=None):
    '''Read the lat/lon files, either from the preset file names or from
    a base name given via latlon=basefilename.'''
    # Need the array dimensions before reading; pull them from the dat
    # file if they have not been set yet.
    if self.nr < 1 or self.na < 1:
        try:
            self.readOffsetsDat(datFile=datFile)
        except Exception:
            myerror(f'Could not read lat/lon dat file {datFile}')
    # Optionally reset the lat/lon file names.
    if latlon is not None:
        self.latlonName(latlon=latlon)
    # Read both files only when they are present on disk.
    if all(os.path.exists(f) for f in (self.latFile, self.lonFile)):
        if self.verbose:
            print(f'Reading {self.latFile} and {self.lonFile} with size '
                  f'{self.nr}x{self.na}')
        self.lat = readImage(self.latFile, self.nr, self.na, '>f8')
        self.lon = readImage(self.lonFile, self.nr, self.na, '>f8')
    else:
        myerror(f'Offsets tried to read an invalid lat or lon file '
                f'- {self.latFile} {self.lonFile}')
def readOffsets(self, fileRoot=None, rangeFile=None, datFile=None):
    '''Read da/dr offset files at previously set up offset names, or
    specify directly with values as defined for init.

    Returns
    -------
    rgOff, azOff : np.ndarray (float64)
        Range and azimuth offsets with NaNs replaced by -2.e9.
    '''
    # Override names if needed.
    if len(self.azimuthFile) == 0 or rangeFile is not None \
            or fileRoot is not None:
        # Bug fix: pass the caller's rangeFile through (it was previously
        # hard-coded to None, silently discarding the override).
        self.offsetFileNames(fileRoot, rangeFile=rangeFile)
    # Read datFile (geometry) if needed.
    if self.nr < 1 or self.na < 1:
        self.readOffsetsDat(datFile=datFile)
    #
    if os.path.exists(self.azimuthFile) and os.path.exists(self.rangeFile):
        if self.verbose:
            print(f'Reading {self.azimuthFile} of size {self.nr}x'
                  f'{self.na}')
        self.azOff = readImage(self.azimuthFile, self.nr, self.na, '>f4')
        # Flag NaNs with the no-data value.
        self.azOff[np.isnan(self.azOff)] = -2.e9
        if self.verbose:
            print(f'Reading {self.rangeFile} of size {self.nr}x{self.na}')
        self.rgOff = readImage(self.rangeFile, self.nr, self.na, '>f4')
        self.rgOff[np.isnan(self.rgOff)] = -2.e9
    else:
        myerror(f'Offsets tried to read offset files: '
                f'{self.azimuthFile} {self.rangeFile}')
    return self.rgOff.astype(float), self.azOff.astype(float)
def readGeodat(self, geoDatFile):
    """Read an XXX.geodat file - for now skip any projection info.

    Parameters
    ----------
    geoDatFile : str
        geodat file to read; sets self.xs, ys (size), dx, dy (pixel
        size), and x0, y0 (origin).
    """
    try:
        skip = [';', '#']  # comment markers
        count = 0
        if self.verbose:
            print('Reading ', geoDatFile)
        # Bug fixes: use a context manager (the original called
        # fgeo.close without parentheses, so the file was never closed)
        # and strip the newline ('\n'; the original had '/n').
        with open(geoDatFile, 'r') as fgeo:
            for line in fgeo:
                if any(s in line for s in skip):
                    continue
                tmp = line.strip('\n').split()
                if count == 0:
                    # int(float()) avoids problem with floating point
                    # input - e.g., 1000.00
                    self.xs = int(float(tmp[0]))
                    self.ys = int(float(tmp[1]))
                    count += 1
                elif count == 1:
                    self.dx = float(tmp[0])
                    self.dy = float(tmp[1])
                    count += 1
                elif count == 2:
                    self.x0 = float(tmp[0])
                    self.y0 = float(tmp[1])
                    break
    except Exception:
        myerror('Error reading geodat file ' + geoDatFile)
def argusToFiredrakeMesh(meshFile, savegmsh=False):
    """Convert an argus mesh to firedrake.

    Parameters
    ----------
    meshFile : string
        File name of an Argus mesh export (must contain .exp)
    savegmsh : bool
        Keep the intermediate gmsh file instead of deleting it.

    Returns
    -------
    mesh : firedrake mesh
    opts : dict
        Options read from the mesh file via readOpts.
    """
    # Bug fix: validate the file name before reading it (the check
    # previously ran after m.argusMesh had already opened the file).
    if '.exp' not in meshFile:
        u.myerror(f'Invalid mesh file [{meshFile}]: missing .exp')
    myMesh = m.argusMesh(meshFile)  # Input the mesh
    # Create a unique id to avoid multiple jobs overwriting each other.
    myId = f'.{uuid.uuid4().hex[:8]}.msh'
    gmshFile = meshFile.replace(".exp", myId)
    myMesh.toGmsh(gmshFile)  # Save in gmsh format
    mesh = firedrake.Mesh(gmshFile)
    # Delete the temporary gmsh file unless asked to keep it.
    if not savegmsh:
        os.remove(gmshFile)
    opts = readOpts(meshFile)
    return mesh, opts
def handle_data(self, data):
    '''Stash an orbit-file entry (must contain 'EOF') on the matching
    Sentinel-1 platform attribute (S1A or S1B).'''
    # Ignore anything that is not an orbit (EOF) entry.
    if 'EOF' not in data:
        return
    if 'S1A' in data:
        self.S1A = data
    elif 'S1B' in data:
        self.S1B = data
    else:
        u.myerror('invalid orbit file')
def ccFileName(self, ccFile=None):
    '''Set the correlation (cc) file name, either explicitly via ccFile
    or by substituting .cc for .da in the file root.'''
    if ccFile is not None:
        # Explicit name wins.
        self.ccFile = ccFile
    elif '.da' in self.fileRoot:
        self.ccFile = self.fileRoot.replace('.da', '.cc')
    else:
        myerror(f'Do not know how to make cc file name for {self.fileRoot}')
def myPrompt(promptText, abort=None):
    '''Prompt for y or n and return True or False - optionally abort
    (via myerror) when abort=True and the answer is n.

    Parameters
    ----------
    promptText : str
        Question to display (rendered in bold with [y/n] appended).
    abort : bool, optional
        If truthy, call myerror instead of returning False on 'n'.

    Returns
    -------
    bool
        True for 'y', False for 'n'.
    '''
    # Loop until a valid y/n answer is given (idiomatic while True
    # instead of while(1); PEP8 spacing).
    while True:
        ans = input('\n\033[1m {0:s} [y/n] \033[0m\n'.format(promptText))
        if ans.lower() == 'y':
            return True
        if ans.lower() == 'n':
            if abort:
                myerror("User prompted abort")
            return False
def readSMB(SMBfile, Q):
    '''Read an SMB file and limit values to +/- 6 to avoid no-data
    values. Returns water equivalent values.

    Parameters
    ----------
    SMBfile : str
        Tiff file with surface mass balance.
    Q : firedrake function space

    Returns
    -------
    firedrake function
        SMB clipped to [-6, 6].
    '''
    # Bug fix: the original tested the function object os.path.exists
    # itself (always truthy) instead of calling it, so a missing file
    # was never detected here.
    if not os.path.exists(SMBfile):
        myerror(f'readSMB: SMB file ({SMBfile}) does not exist')
    SMB = mf.getModelVarFromTiff(SMBfile, Q)
    # Avoid any unreasonably large value (no-data fills).
    SMB = icepack.interpolate(
        firedrake.max_value(firedrake.min_value(SMB, 6), -6), Q)
    return SMB
def setupFriction(forwardParams):
    '''Error check and return the friction law specified by
    forwardParams["friction"].'''
    try:
        # Dispatch table: friction-law name -> implementation.
        lawName = forwardParams['friction']
        frictionLaw = {'weertman': mf.weertmanFriction,
                       'schoof': mf.schoofFriction}[lawName]
    except Exception:
        myerror(f'setupFriction: Invalid friction law: '
                f'{forwardParams["friction"]}')
    return frictionLaw
def __init__(self, fileRoot=None, echo=False):
    '''Open a time-stamped log file named log.<fileRoot>.<timestamp>.

    Parameters
    ----------
    fileRoot : str, optional
        Middle piece of the log file name; defaults to 'log'.
    echo : bool
        Not used in this constructor; retained for interface
        compatibility (presumably consumed by other methods - confirm).
    '''
    # Idiom fixes: 'is None' instead of '== None'; catch Exception
    # rather than a bare except, which would also swallow SystemExit
    # and KeyboardInterrupt.
    if fileRoot is None:
        fileRoot = 'log'
    self.logFile = 'log.{0:s}.{1:s}'.format(
        fileRoot, datetime.now().strftime('%Y-%m-%d.%H-%M-%S'))
    try:
        print('Opening log file: ' + self.logFile)
        self.fp = open(self.logFile, 'w')
    except Exception:
        myerror('Could not open logfile {0:s}'.format(self.logFile))
def inputMeltParams(meltParams):
    """Read parameters for melt models.

    Parameters
    ----------
    meltParams : str
        yaml file with melt params for various models

    Returns
    -------
    dict
        Parsed melt-parameter dictionary.
    """
    # Fail early with a clear message if the file is missing.
    if not os.path.exists(meltParams):
        myerror(f'inputMeltParams: meltParams file ({meltParams}) not found.')
    with open(meltParams, 'r') as fp:
        parsed = yaml.load(fp, Loader=yaml.FullLoader)
    return parsed
def getMask(self, maskFile=None):
    '''Return the mask, reading it first if it has not been loaded.'''
    # Already loaded - nothing to do.
    if len(self.mask) > 0:
        return self.mask
    self.readMask(maskFile)
    # Check again in case the read failed; fall back to all zeros.
    if len(self.mask) <= 0:
        self.mask = np.zeros((self.na, self.nr), dtype=np.byte)
        myerror(' Could not read mask - check files exist - returning 0s')
    return self.mask
def readLSdat(self, LSDatFile, printData=False):
    '''Read an LS dat file of key=value pairs and populate match
    metadata: file names, geometry, dates, and match statistics.

    Parameters
    ----------
    LSDatFile : str
        dat file with one key=value pair per line.
    printData : bool
        Print the parsed values when True.
    '''
    myDict = {}
    try:
        # Context manager ensures the file is closed even if parsing
        # fails (the original open/close pair leaked on exceptions).
        with open(LSDatFile, 'r') as fp:
            for line in fp:
                pieces = line.split('=')
                # Keep only well-formed key=value lines.
                if len(pieces) == 2:
                    myDict[pieces[0].strip()] = pieces[1].strip()
        # Now stuff values.
        self.fileEarly = myDict['fileEarly']
        self.fileLate = myDict['fileLate']
        # Origin converted m -> km.
        self.x0 = float(myDict['x0']) / 1000.
        self.y0 = float(myDict['y0']) / 1000.
        self.dxImage = float(myDict['dx'])
        self.dyImage = float(myDict['dy'])
        self.stepX = int(myDict['stepX'])
        self.stepY = int(myDict['stepY'])
        self.xs = int(myDict['nx'])
        self.ys = int(myDict['ny'])
        self.slowFlag = int(myDict['slowFlag']) > 0
        self.domain = int(myDict['EPSG'])
        self.JD1 = float(myDict['earlyImageJD'])
        self.JD2 = float(myDict['lateImageJD'])
        self.successRate = \
            float(myDict['Success_rate_for_attempted_matches(%)'])
        # Optional keys return None (or similar) via tryOptParam.
        self.culledRate = \
            self.tryOptParam('Culled_rate_for_attempted_matches(%)',
                             myDict, float)
        self.sigmaX = self.tryOptParam('Mean_sigmaX', myDict, float)
        self.sigmaY = self.tryOptParam('Mean_sigmaY', myDict, float)
        # Posting is image pixel size times the step.
        self.dx = self.dxImage * self.stepX
        self.dy = self.dyImage * self.stepY
        # Julian dates are relative to JD 2451544.5 (2000-01-01).
        self.date1 = datetime.strptime('2000:01:01', "%Y:%m:%d") + \
            timedelta(days=self.JD1 - 2451544.5)
        self.date2 = datetime.strptime('2000:01:01', "%Y:%m:%d") + \
            timedelta(days=self.JD2 - 2451544.5)
        #
        if printData:
            print(self.fileEarly)
            print(self.fileLate)
            print(self.x0, self.y0)
            print(self.dx, self.dy)
            print(self.stepX, self.stepY)
            print(self.xs, self.ys)
            print(self.domain)
            print(self.date1, self.date2)
            print(self.successRate, self.culledRate)
            print(self.sigmaX, self.sigmaY)
    except Exception:
        myerror("Problem reading lsdat file " + LSDatFile)
def getLatLon(self, latlon=None):
    '''Return lat/lon arrays, reading them first if needed.'''
    if len(self.lat) <= 0:
        self.readLatlon(latlon=latlon)
        # Check again in case the read failed.
        if len(self.lat) <= 0:
            myerror(' Could not read lat/lon - check files exist ')
    # Force conversion to 64 bit for coordinate transforms.
    return self.lat.astype(float), self.lon.astype(float)
def getModelVelocity(baseName, Q, V, minSigma=5, maxSigma=100): """Read in a tiff velocity data set and return firedrake interpolate functions. Parameters ---------- baseName : str baseName should be of the form pattern.*.abc or pattern The wildcard (*) will be filled with the suffixes (vx, vy.) e.g.,pattern.vx.abc.tif, pattern.vy.abc.tif. Q : firedrake function space function space V : firedrake vector space vector space Returns ------- uObs firedrake interp function on V velocity (m/yr) speed firedrake interp function on Q speed in (m) sigmaX firedrake interp function on Q vx error (m) sigmaY firedrake interp function on Q vy error (m) """ # suffixes for products used suffixes = ['vx', 'vy', 'ex', 'ey'] rasters = {} # prep baseName - baseName.*.xyz.tif or baseName.* if '*' not in baseName: baseName += '.*' if '.tif' not in baseName: baseName += '.tif' # read data for suffix in suffixes: myBand = baseName.replace('*', suffix) if not os.path.exists(myBand): u.myerror(f'Velocity/error file - {myBand} - does not exist') rasters[suffix] = rasterio.open(myBand, 'r') # Firedrake interpolators uObs = icepack.interpolate((rasters['vx'], rasters['vy']), V) # force error to be at least 1 to avoid 0 or negatives. sigmaX = icepack.interpolate(rasters['ex'], Q) sigmaX = icepack.interpolate(firedrake.max_value(sigmaX, minSigma), Q) sigmaX = icepack.interpolate(firedrake.min_value(sigmaX, maxSigma), Q) sigmaY = icepack.interpolate(rasters['ey'], Q) sigmaY = icepack.interpolate(firedrake.max_value(sigmaY, minSigma), Q) sigmaY = icepack.interpolate(firedrake.min_value(sigmaY, maxSigma), Q) speed = icepack.interpolate(firedrake.sqrt(inner(uObs, uObs)), Q) # return results return uObs, speed, sigmaX, sigmaY
def __init__(self, x0=None, y0=None, xs=None, ys=None, dx=None, dy=None,
             domain=None, verbose=True, wkt=None):
    """Initialize geodat(x0=None, y0=None, xs=None, ys=None, dx=None,
    dy=None, domain=['greenland'])

    Parameters
    ----------
    x0, y0 : float, optional
        Origin (presumably km, lower-left corner - see writeGeodat).
    xs, ys : int, optional
        Image size in pixels.
    dx, dy : float, optional
        Pixel size (m).
    domain : str, optional
        'greenland' or 'antarctica'; selects the PS projection.
    verbose : bool
        Print progress messages.
    wkt : str, optional
        Obsolete - projection given directly as a wkt/EPSG string.
    """
    # set everything to zero as default
    self.x0, self.y0, self.dx, self.dy = 0.0, 0.0, 0.0, 0.0
    self.xs = self.ys = 0
    self.domain = 'greenland'
    # in most cases all or no args would be passed.
    if x0 is not None:
        self.x0 = x0
    if y0 is not None:
        self.y0 = y0
    if xs is not None:
        self.xs = int(xs)
    if ys is not None:
        self.ys = int(ys)
    if dx is not None:
        self.dx = dx
    # Bug fix: this guard previously tested dx (copy/paste error), so
    # self.dy was set to None when dx was given without dy, and a dy
    # passed without dx was silently ignored.
    if dy is not None:
        self.dy = dy
    self.verbose = verbose
    if domain is not None:
        self.domain = domain.lower()
    domains = ['greenland', 'antarctica']
    if self.domain not in domains:
        myerror(f'\n--- setup geodat invalid domain {self.domain} not'
                f' in {domains}')
    #
    # setup conversions
    #
    self.llprojEPSG = "EPSG:4326"
    self.llproj = pyproj.Proj(self.llprojEPSG)
    # Obsolete wkt path: infer the domain name from the wkt text.
    if wkt is not None:
        self.domain = re.findall('[a-z,A-Z]+', wkt)[1].lower()
        self.xyprojSRS = wkt  # This could be a wkt or epsg
    else:
        if self.domain == 'greenland':
            self.xyprojSRS = "EPSG:3413"
        elif self.domain == 'antarctica':
            self.xyprojSRS = "EPSG:3031"
    self.xyproj = pyproj.Proj(self.xyprojSRS)
    self.lltoxyXform = pyproj.Transformer.from_crs(self.llprojEPSG,
                                                   self.xyprojSRS)
    self.xytollXform = pyproj.Transformer.from_crs(self.xyprojSRS,
                                                   self.llprojEPSG)
    if self.verbose:
        print('setting up projections', self.domain)
def computeHeading(self, fileRoot=None):
    """Estimate a smoothly varying satellite heading field from the
    along-track latitude gradient, fit with a plane in (lat, lon).

    Requires self.lat/self.lon (see readLatlon) and self.geodatrxa to be
    set. Sets and returns self.heading (radians).
    """
    if len(self.lat) == 0 or len(self.lon) == 0:
        myerror('Error in offsets.computeHeading: called without latitude '
                'loaded')
    # Azimuth pixel spacing must be known to form the gradient.
    if self.slpAz < 0:
        print('Error in offsets.computeHeading: no azimuth size, check '
              'geodatrxa specified')
    # NOTE(review): isRightLooking has no call parentheses - if it is a
    # method (not a property) this condition is always False and the
    # left-looking guard never fires; confirm against geodatrxa.
    if not self.geodatrxa.isRightLooking:
        myerror('offsets.computeHeading: Left looking heading not '
                'implemented yet')
    # Approximate meters per degree of latitude.
    lattom = 110947.
    skip = 1
    # Along-track ground distance per (skipped) azimuth sample.
    azsp = self.slpAz * self.da * skip
    dlat = np.zeros(np.shape(self.lat))
    # Along-track latitude change converted to meters.
    dlat[skip:, :] = (self.lat[skip:, :] - self.lat[:-skip, :]) * lattom
    # slowly varying so replicate bottom line
    dlat[0:1, :] = dlat[skip, :]
    # make relative to south
    if self.geodatrxa.isSouth():
        #
        if not self.geodatrxa.isDescending():
            # for ascending, this will make dlat negative since moving away
            # from South
            # for descending, this will make dlat positive since moving
            # toward south
            dlat *= -1
        mySign = 1
    else:
        # for NH, this will make descending +
        if self.geodatrxa.isDescending():
            dlat *= -1.0
            mySign = 1.
        else:
            mySign = -1.
    # Heading angle from the along-track latitude rate (clipped so
    # arccos stays in its domain).
    self.heading = np.arccos(np.clip(dlat / azsp, -1., 1.))
    # Smooth the noisy per-pixel heading with a planar fit in lat/lon.
    planeH = self.computePlaneH(self.lat, self.lon, self.heading)
    self.heading = mySign * \
        (planeH[0] + planeH[1] * self.lat + planeH[2] * self.lon)
    # compute median and sigma
    # med=np.median(self.heading)
    # sig=np.std(self.heading)
    # clip to avoid extrem values
    # self.heading=np.clip(self.heading, med-sig, med+sig)
    # rotate back to north (this seems to fix cos issue)
    if not self.geodatrxa.isSouth() and self.geodatrxa.isDescending():
        self.heading += np.pi
    return self.heading
def writeGeodat(self, geoDatFile):
    """Write a geodat file.

    Parameters
    ----------
    geoDatFile : str
        Output file; records image size (pixels), pixel size (m), and
        origin (km, lower-left corner).
    """
    try:
        # Context manager guarantees the file is closed on error.
        with open(geoDatFile, 'w') as fgeo:
            print('# 2', file=fgeo)
            print(';\n; Image size (pixels) nx ny\n;', file=fgeo)
            print('{:d} {:d}'.format(self.xs, self.ys), file=fgeo)
            print(';\n; Pixel size (m) deltaX deltaY\n;', file=fgeo)
            print('{:.4f} {:.4f}'.format(self.dx, self.dy), file=fgeo)
            print(';\n; Origin, lower left corner (km) Xo Yo\n;', file=fgeo)
            print('{:.4f} {:.4f}'.format(self.x0, self.y0), file=fgeo)
            print('&', file=fgeo)
    except Exception:
        # Bug fix: myerror takes a single message string; the original
        # passed the file name as a second positional argument.
        myerror(f'Error writing geodat file {geoDatFile}')
def getModelVarFromTiff(myTiff, Q):
    """Read a model variable from a tiff file using rasterio.

    Parameters
    ----------
    myTiff : str
        tiff file with a scalar variable
    Q : firedrake function space
        function space

    Returns
    -------
    firedrake function
        Data interpolated from the tiff onto Q.
    """
    # Abort with a clear message rather than a rasterio open failure.
    if not os.path.exists(myTiff):
        u.myerror(f'Geometry file {myTiff} does not exist')
    raster = rasterio.open(myTiff)
    return icepack.interpolate(raster, Q)
def parseForwardParams(parser, defaults): """ Parse model params with the following precedence: 1) Set at command line, 2) Set in a parameter file, 3) Default value. Merge in parameters that are taken from inversion result """ # args = parser.parse_args() # Set results initially from params file forwardParams = mf.readModelParams(args.params, key='forwardParams') # Overwrite with command line for arg in vars(args): # If value input through command line, override existing. argVal = getattr(args, arg) if argVal is not None: forwardParams[arg] = argVal # If not already in params, then use default value for key in defaults: if key not in forwardParams: forwardParams[key] = defaults[key] # get rid of lists for main args forwardParams['inversionResult'] = \ f'{forwardParams["inversionResult"][0]}.deg{forwardParams["degree"]}' forwardParams['forwardResultDir'] = forwardParams['forwardResult'][0] forwardParams['forwardResult'] = forwardParams['forwardResult'][0] # append deg x to all ouput files forwardParams['forwardResult'] += f'.deg{forwardParams["degree"]}' # read inversonParams inversionYaml = f'{forwardParams["inversionResult"]}.yaml' inversionParams = mf.readModelParams(inversionYaml, key='inversionParams') # # Grap inversion params for forward sim for key in ['friction', 'degree', 'mesh', 'uThresh', 'GLTaper']: try: forwardParams[key] = inversionParams[key] except Exception: myerror(f'parseForwardParams: parameter- {key} - missing from' ' inversion result') if forwardParams['GLThresh'] is None: forwardParams['GLThresh'] = GLThreshDefaults[forwardParams['friction']] # for param in ['uThresh']: # forwardParams[param] = firedrake.Constant(forwardParams[param]) return forwardParams, inversionParams
def readMask(self, maskFile=None):
    '''Read the mask file - assumes filenames set, or specify here with
    maskFile=filename.

    Returns
    -------
    np.ndarray
        The mask array that was read.
    '''
    if maskFile is not None:
        self.maskFileName(maskFile=maskFile)
    if len(self.maskFile) == 0:
        myerror(f'\n\nError: Offsets tried to read an blank mask file -'
                f'{self.maskFile}')
    if os.path.exists(self.maskFile):
        if self.verbose:
            print(f'Reading {self.maskFile} with size {self.nr}x{self.na}')
        self.mask = readImage(self.maskFile, self.nr, self.na, 'u1')
    else:
        # Bug fix: the second literal was missing its f prefix, so the
        # message printed the literal text '{self.maskFile}' instead of
        # the file name.
        myerror(f'\n\nOffsets tried to read an invalid mask '
                f'file {self.maskFile} ')
    return self.mask
def readGeodatFromTiff(self, tiffFile):
    """Read geoinformation from a tiff file and use it to create geodat
    info - assumes PS coordinates."""
    try:
        gdal.AllRegister()
        ds = gdal.Open(tiffFile)
        self.xs = ds.RasterXSize
        self.ys = ds.RasterYSize
        gt = ds.GetGeoTransform()
        # Pixel sizes (magnitude only; gt[5] is negative for north-up).
        self.dx, self.dy = abs(gt[1]), abs(gt[5])
        # Origin in km, shifted half a pixel to the cell center.
        self.x0 = (gt[0] + self.dx / 2) * 0.001
        if gt[5] < 0:
            # North-up raster: gt[3] is the top edge; move to lower-left.
            self.y0 = (gt[3] - self.ys * self.dy + self.dy / 2) * 0.001
        else:
            self.y0 = (gt[3] + self.dy / 2) * 0.001
    except Exception:
        myerror(f"Error trying to readgeodat info from : {tiffFile}")
def remove(self, toRemove):
    '''Remove offsets flagged by a logical array of the same shape by
    setting them to the no-data value (-2.e9).

    Parameters
    ----------
    toRemove : np.ndarray of bool
        True where offsets should be discarded.
    '''
    if len(self.rgOff) > 0:
        if toRemove.shape != self.rgOff.shape:
            # Fixed typo in message ('doesn not' -> 'does not').
            myerror('removeOffsets: toRemove shape does not equal '
                    'offset set shape')
        self.rgOff[toRemove] = -2.e9
        self.azOff[toRemove] = -2.e9
    else:
        print('warning - no offsets remove because offsets not specified')
        return
    # do other types
    if len(self.sigmaR) > 0:
        self.sigmaR[toRemove] = -2.e9
        self.sigmaA[toRemove] = -2.e9
    if len(self.matchType) > 0:
        self.matchType[toRemove] = 0
def datFileName(self, datFile=None, myPath=None):
    '''Compute the dat file name - follows rules based on the standard
    names, but an override name can be given via datFile.'''
    if len(self.fileRoot) == 0:
        myerror('datFileName: offsets no file Root selected')
    else:
        root = self.fileRoot
        if datFile is not None:
            # Caller-specified name takes precedence.
            self.datFile = datFile
        elif 'azimuth' in root:
            self.datFile = root + '.dat'
        elif '.da' in root and '.dat' not in root:
            self.datFile = root.replace('.da', '.da.dat')
    # Override path if requested.
    if myPath is not None:
        self.setMyPath(myPath)
    self.datFile = self.offFilePath(self.datFile)
def setupMelt(forwardParams):
    '''Parse the melt params file and return the melt model function and
    its parameter dict.

    Parameters
    ----------
    forwardParams : dict
        Must contain 'meltParamsFile' (yaml file), 'meltParams' (key
        into that file), and 'meltModel' (model function name).

    Returns
    -------
    meltModel : callable
    meltParams : dict
    '''
    allMeltParams = mf.inputMeltParams(forwardParams['meltParamsFile'])
    try:
        meltParams = allMeltParams[forwardParams['meltParams']]
    except Exception:
        # Bug fix: this lookup fails on the 'meltParams' key, but the
        # message previously reported forwardParams["meltModel"].
        myerror(f'setupMelt: Key error for {forwardParams["meltParams"]} from '
                f'melt params file {forwardParams["meltParamsFile"]}')
    meltModels = {
        'piecewiseWithDepth': mf.piecewiseWithDepth,
        'divMelt': mf.divMelt
    }
    try:
        meltModel = meltModels[forwardParams['meltModel']]
    except Exception:
        myerror(f'setupMelt: Invalid model selection '
                f'{forwardParams["meltModel"]} not in melt def.: {meltModels}')
    return meltModel, meltParams
def readSigma(self, sigmaAFile=None, sigmaRFile=None):
    '''Read sigmaA/R files - assumes filenames already set, but they can
    be overridden with sigmaAFile, sigmaRFile.

    Returns
    -------
    sigmaR, sigmaA : np.ndarray
    '''
    # Reset file names only when specified; defaults assumed from init.
    if sigmaAFile is not None or sigmaRFile is not None:
        self.sigmaFileName(sigmaAFile=sigmaAFile, sigmaRFile=sigmaRFile)
    # Read the results after checking both files exist.
    bothExist = os.path.exists(self.sigmaAFile) and \
        os.path.exists(self.sigmaRFile)
    if bothExist:
        if self.verbose:
            print(f'Reading {self.sigmaAFile} and {self.sigmaRFile} with '
                  f'size {self.nr}x{self.na}')
        self.sigmaA = readImage(self.sigmaAFile, self.nr, self.na, '>f4')
        self.sigmaR = readImage(self.sigmaRFile, self.nr, self.na, '>f4')
    else:
        myerror(f'Offsets tried to read an invalid sigmaA/R file - '
                f'{self.sigmaAFile} and {self.sigmaRFile}')
        exit()  # preserved from original; redundant if myerror exits
    return self.sigmaR, self.sigmaA
def checkOffsetFiles(self, fileRoot=None, myLog=None):
    '''Check that the offset files exist and are the correct size.

    Parameters
    ----------
    fileRoot : str, optional
        Override the current file root (dat name recomputed).
    myLog : logger, optional
        Entry/return are logged and errors routed through it when given.
    '''
    if myLog is not None:
        myLog.logEntry('checkOffsetFiles')
    if fileRoot is not None:
        self.fileRoot = fileRoot
        self.datFileName()
    if self.datFile is None or self.fileRoot is None or \
            self.azimuthFile is None:
        myerror('Check offsets called without setting up file names',
                myLogger=myLog)
    #
    if not os.path.exists(self.datFile):
        myerror(f'Missing offsets dat file {self.datFile}', myLogger=myLog)
    try:
        self.readOffsetsDat()
    except Exception:
        # Bug fix: the keyword was myLog=, inconsistent with the
        # myLogger= keyword used by every other call (would raise a
        # TypeError when this path fired).
        myerror(f'Error reading offsets dat file {self.datFile}',
                myLogger=myLog)
    # Expected file size: nr * na 4-byte samples.
    mySize = self.nr * self.na * 4
    myFiles = [self.azimuthFile, self.rangeFile]
    for myFile in myFiles:
        # check exists
        if not os.path.exists(myFile):
            myerror(f'Missing offset file {myFile}', myLogger=myLog)
        # check size
        else:
            statinfo = os.stat(myFile)
            fileSize = statinfo.st_size
            if fileSize != mySize:
                # Bug fix: the second literal was missing its f prefix,
                # so {fileSize} was printed literally.
                myerror(
                    f'Offset file {myFile} should have {mySize} bytes'
                    f' but only has {fileSize} bytes', myLogger=myLog)
    if myLog is not None:
        myLog.logReturn('checkOffsetFiles')
def readModelParams(paramsFile, key=None):
    """Parse a model params dict from a yaml file.

    The file can either have a single unnamed dict:
        x: abc
        y: xyz
    or several named ones:
        key1:
            x: abc
        key2:
            var1: 1
    In the former case no key is needed. In the latter case, pass "key"
    to select one dict; otherwise a dict of dicts is returned.

    Parameters
    ----------
    paramsFile : str
        yaml file with desired model params (None returns {}).
    key : str, optional
        Key to select which dict to return.

    Returns
    -------
    dict
        Model params with 'params' added as the file it was read from.
    """
    if paramsFile is None:
        return {}
    try:
        with open(paramsFile) as fp:
            modelParams = yaml.load(fp, Loader=yaml.FullLoader)
        if key is not None and key in modelParams:
            modelParams = modelParams[key]
        # Force to use name of file actually being read.
        modelParams['params'] = paramsFile
    except Exception:
        myerror(f'Could not open params file: {paramsFile}')
    return modelParams
def getRACoords(self):
    '''Compute single-look pixel coordinate grids.

    Returns
    -------
    rCoord, aCoord : np.ndarray (na x nr)
        Range and azimuth coordinates for every pixel; cached on the
        instance after the first call.
    '''
    if self.na < 1 or self.nr < 1:
        myerror('offsets.slpCoords: tried to define coordinate with no '
                'size input - make sure .dat file read')
    # Return cached grids if already computed.
    if len(self.rCoord) > 1:
        return self.rCoord, self.aCoord
    # 1-D coordinate axes in range and azimuth.
    rc = np.arange(0, self.nr * self.dr, self.dr) + self.r0
    ac = np.arange(0, self.na * self.da, self.da) + self.a0
    # Idiom/perf: one vectorized meshgrid call replaces the original
    # per-row/per-column copy loops (identical (na, nr) result).
    self.rCoord, self.aCoord = np.meshgrid(rc, ac)
    return self.rCoord, self.aCoord