def selectOutputFile (cfg, path, tag, fmasks):
    '''
    Selects the model output file to read and updates the experiment
    tag with the detected OFS cycle if needed.

    Args:
        cfg    (dict): parsed configuration structure
        path   (str) : directory to search for output files
        tag    (str) : experiment tag; may get '.tXXz' appended
        fmasks (str) : comma-separated list of file masks
    Returns:
        (outputFile, tag): path of the latest matching file, updated tag
    '''
    masks = fmasks.split(',')
    cycle = ''
    try:
        cycle = cfg['Forecast']['cycle']
    except KeyError:            # cycle is optional in the config
        pass
    if cycle == '':
        cycle = detectCycle (tag)
    print ('tag=' + tag)
    print ('cycle=' + cycle)

    outputFile = []
    for m in masks:
        outputFile.extend(glob.glob(path + '*' + cycle + '*' + m + '*'))

    # Fix: report an empty match explicitly instead of failing silently
    # at the indexing below with a bare IndexError.
    if len(outputFile) == 0:
        msg('e','No output detected in ' + path + ' for masks ' + fmasks)
    if len(outputFile) > 1:
        msg('w','Found more than 1 output file. Verify your mask!')
        for fil in outputFile:
            print(fil)

    outputFile.sort(key=os.path.getmtime)
    outputFile = outputFile[-1]  # Taking the latest cycle (estofs)

    # Update tag with the detected OFS cycle
    if cycle == '':
        for cycle in ['t00z','t06z','t12z','t18z']:
            if cycle in outputFile:
                tag = tag + '.' + cycle
                msg('i','Tag updated: ' + tag)
    return outputFile, tag
def readSurfaceField_ascii ( asciiFile, verbose=1 ):
    """
    Reads ADCIRC 2D output file (e.g. mmaxele)
    Args:
        'asciiFile' (str): full path to ADCIRC 2D file in ASCII format
        'verbose'   (int): print progress messages when truthy
    Returns:
        value (np.array [NP, NS]), squeezed, where NP - number of grid
        points, and NS - number of datasets; fill values become NaN.
    """
    if verbose:
        msg( 'i','Reading ASCII file ' + asciiFile + '.')
    # Fix: use a context manager so the file is always closed.
    with open(asciiFile) as f:
        myDesc = f.readline().strip()
        if verbose:  # fix: honor the verbose flag for this message too
            msg( 'i','Field description [' + myDesc + '].')
        line = f.readline().split()
        myNDSETSE = int(line[0])   # number of datasets
        myNP = int(line[1])        # number of grid points
        # myNSPOOLGE = int(line[3])
        # myIRTYPE = int(line[4])
        # dtdpXnspoolge = float(line[2])
        line = f.readline().split()   # dataset time header (skipped)
        # myTIME = float(line[0])
        # myIT = float(line[1])
        # NOTE(review): only one time header is skipped even for
        # multiple datasets — confirm against the expected file layout.
        value = np.zeros([myNP,myNDSETSE], dtype=float)
        for s in range(myNDSETSE):
            for n in range(myNP):
                # each record: node index, value
                value[n,s] = float(f.readline().split()[1])
    value = np.squeeze(value)
    fill_value = -99999.0
    value[value==fill_value]=np.nan   # mask dry/undefined nodes
    return value
def readlines_ssl(remote, verbose=False, tmpDir=None, tmpFile=None):
    """
    Deals with expired SSL certificate issue.
    1. Downloads remote into temporary file with certificate
       verification disabled
    2. Reads line by line
    3. Removes temporary file

    Args:
        remote  (str): URL to download
        verbose (bool): log the download when True
        tmpDir  (str): optional directory for the temporary file
        tmpFile (str): optional temporary file name
    Returns:
        list of str: lines of the downloaded file
    """
    lines = []
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE
    if tmpFile is None:
        tmpFile = str(uuid.uuid4()) + '.tmp'
    if tmpDir is not None:
        tmpFile = os.path.join(tmpDir, tmpFile)
    if verbose:
        msg('info', 'downloading ' + remote + ' as temporary ' + tmpFile)
    try:
        # BUG FIX: urlretrieve() ignores the ssl context entirely, so the
        # "disable verification" intent never took effect. Use urlopen()
        # with the relaxed context and write the payload ourselves.
        with urllib.request.urlopen(remote, context=ctx) as response:
            with open(tmpFile, 'wb') as out:
                out.write(response.read())
    except OSError:   # URLError subclasses OSError
        msg('error', 'Cannot download ' + remote)
    lines = open(tmpFile, errors='replace').readlines()
    os.remove(tmpFile)
    return lines
def read_cmd_argv (argv):
    '''
    Parse command line arguments.

    Args:
        argv (list of str): argument vector, typically sys.argv[1:]
    Returns:
        argparse.Namespace with 'iniFile' and 'paths' attributes
    '''
    parser = argparse.ArgumentParser()
    parser.add_argument('-i','--iniFile', required=True)
    parser.add_argument('-p','--paths', required=True)
    # Fix: parse the argv actually passed in; parse_args() with no
    # argument silently re-reads sys.argv and ignored this parameter.
    args = parser.parse_args(argv)
    msg('i', 'autoval.validate.run.py is configured with :')
    print(args)
    return args
def check_comout (comout):
    """
    Checks the validity of the specified model output directory.

    Returns 1 when comout exists and holds at least one netCDF file,
    0 otherwise (a warning is logged for either failure).
    """
    if not os.path.exists(comout):
        msg('w','ofs path ' + comout + ' does not exist.')
        return 0
    ncFiles = glob.glob(os.path.join(comout,'*.nc'))
    if not ncFiles:
        msg('w','No netCDF files in ofs path ' + comout + '.')
        return 0
    return 1
def readSurfaceField ( ncFile, ncVar = 'zeta_max', verbose=1 ):
    """
    Reads specified variable from the ADCIRC 2D netCDF output
    and grid points along with validation time.
    Args:
        'ncFile' (str): full path to netCDF file
        'ncVar'  (str): name of netCDF field
    Returns:
        dict: 'lon', 'lat', 'time', 'base_date', 'value', 'path',
        'variable'; or None when the file does not exist.
    """
    if verbose:
        msg( 'i','Reading [' + ncVar + '] from ' + ncFile)
    if not os.path.exists (ncFile):
        msg( 'e','File ' + ncFile + ' does not exist.')
        return
    nc = netCDF4.Dataset (ncFile)
    try:
        lon = nc.variables['x'][:]
        lat = nc.variables['y'][:]
        tim = nc.variables['time'][:]
        fld = nc.variables[ncVar][:]
        missingVal = nc.variables[ncVar]._FillValue
        try:
            fld.unshare_mask()
        except AttributeError:   # plain ndarray: nothing to unshare
            pass
        fld [fld==missingVal] = np.nan
        d = nc.variables['time'].base_date[0:19].strip()
    finally:
        nc.close()   # fix: release the dataset handle
    # base_date comes with or without the seconds field
    if len(d)==19:
        baseDate = datetime.strptime(d,'%Y-%m-%d %H:%M:%S')
    elif len(d)==16:
        baseDate = datetime.strptime(d,'%Y-%m-%d %H:%M')
    else:
        # fix: fail with a clear message instead of a NameError below
        raise ValueError('Unsupported base_date format: ' + d)
    realtime = np.array([baseDate +
                         timedelta(seconds=int(tim[i]))
                         for i in range(len(tim))])
    return { 'lon' : lon, 'lat' : lat,
             'time' : realtime, 'base_date': baseDate,
             'value' : fld, 'path' : ncFile, 'variable' : ncVar}
def readTimeSeries (ncFile, ncVar = 'zeta', verbose=1):
    """
    Reads fort.61.nc-like file (ADCIRC station time series).

    Args:
        'ncFile' (str): full path to netCDF file
        'ncVar'  (str): name of the station variable to read
    Returns:
        dict: 'lat', 'lon', 'time', 'base_date', 'zeta', 'stations',
        'title'; or None when the file does not exist.
    """
    if verbose:
        msg( 'i','Reading [' + ncVar + '] from ' + ncFile)
    if not os.path.exists (ncFile):
        msg( 'e','File ' + ncFile + ' does not exist.')
        return
    nc = netCDF4.Dataset( ncFile )
    try:
        fld = nc.variables[ncVar][:]
        missingVal = nc.variables[ncVar]._FillValue
        try:
            fld.unshare_mask()
        except AttributeError:   # plain ndarray: nothing to unshare
            pass
        fld [np.where(fld == missingVal)] = np.nan
        lon = nc.variables['x'][:]
        lat = nc.variables['y'][:]
        tim = nc.variables['time'][:]
        nam = nc.variables['station_name'][:]
        stations = netCDF4.chartostring(nam)  # Python3 requirement?
        ncTitle = nc.getncattr('title')
        try:
            baseDate = datetime.strptime(nc.variables['time'].base_date[0:19],
                                         '%Y-%m-%d %H:%M:%S')
        except ValueError:
            # when 00 sec is not written at all
            baseDate = datetime.strptime(nc.variables['time'].base_date[0:19],
                                         '%Y-%m-%d %H:%M ')
    finally:
        nc.close()   # fix: release the dataset handle
    realtime = np.array([baseDate +
                         timedelta(seconds=int(tim[i]))
                         for i in range(len(tim))])
    return {'lat' : lat, 'lon' : lon,
            'time' : realtime, 'base_date' : baseDate,
            'zeta' : fld, 'stations' : stations, 'title' : ncTitle}
def setDomainLimits(cfg):
    '''
    Set/get bbox: resolves the analysis domain limits from (in order of
    priority) a remote domain file, the [Analysis] section of cfg, or
    whole-globe defaults, and writes them back into cfg.

    Args:
        cfg (dict): parsed configuration structure
    Returns:
        cfg (dict): same structure with lonmin/lonmax/latmin/latmax set
    '''
    # Fix: defaults were swapped — longitude spans +-180, latitude +-90.
    lonMin = -179.99
    lonMax = 179.99
    latMin = -89.99
    latMax = 89.99
    # Per-limit overrides from the config; each one is optional.
    try:
        lonMin = float( cfg['Analysis']['lonmin'])
    except (KeyError, TypeError, ValueError):
        pass
    try:
        lonMax = float( cfg['Analysis']['lonmax'])
    except (KeyError, TypeError, ValueError):
        pass
    try:
        latMin = float( cfg['Analysis']['latmin'])
    except (KeyError, TypeError, ValueError):
        pass
    try:
        latMax = float( cfg['Analysis']['latmax'])
    except (KeyError, TypeError, ValueError):
        pass
    # A remote domain file, when configured and readable, wins over all.
    try:
        domainFile = os.path.join(cfg['Analysis']['localdatadir'],
                                  'domain.ini')
        csdllib.oper.transfer.refresh( cfg['Analysis']['domainfile'],
                                       domainFile)
        dom = csdllib.oper.sys.config (domainFile)
        lonMin = float(dom['Limits']['lonmin'])
        lonMax = float(dom['Limits']['lonmax'])
        latMin = float(dom['Limits']['latmin'])
        latMax = float(dom['Limits']['latmax'])
        msg('i','Domain is read from ' + cfg['Analysis']['domainfile'])
    except Exception:
        pass   # best effort: keep whatever limits we already have
    msg('i','Domain limits are '+ str(lonMin) + ', ' + str(lonMax) +
        ', ' + str(latMin) + ', ' + str(latMax))
    cfg['Analysis']['lonmin'] = lonMin
    cfg['Analysis']['lonmax'] = lonMax
    cfg['Analysis']['latmin'] = latMin
    cfg['Analysis']['latmax'] = latMax
    return cfg
def readlines(remote, verbose=False, tmpDir=None, tmpFile=None):
    """
    1. Downloads remote into temporary file
    2. Reads line by line
    3. Removes temporary file

    Args:
        remote  (str): URL (or local path) to download
        verbose (bool): log the download when True
        tmpDir  (str): optional directory for the temporary file
        tmpFile (str): optional temporary file name
    Returns:
        list of str: lines of the downloaded file
    """
    if tmpFile is None:
        tmpFile = str(uuid.uuid4()) + '.tmp'  # Unique temporary name
    if tmpDir is not None:
        tmpFile = os.path.join(tmpDir, tmpFile)
    if verbose:
        msg('info', 'downloading ' + remote + ' as temporary ' + tmpFile)
    urllib.request.urlretrieve(remote, tmpFile)
    with open(tmpFile, errors='replace') as fh:
        content = fh.readlines()
    os.remove(tmpFile)
    return content
def writeOffset63 ( val, offset63file, note=None):
    """
    Writes ADCIRC offset.63 file in ASCII format for use with
    pseudo pressure loading option
    Args:
        val (float)        : Array of gridded values
        offset63file (str) : Full path to the output file
        note (str)         : Optional header comment text
    Note:
        val should be the same size and order as your grid vectors
    """
    msg( 'i','Writing Offset63 file...')
    header = "# ADCIRC Offset file\n" if note is None else "# " + note + "\n"
    with open(offset63file, 'w') as out:
        out.write(header)
        out.write("1.0\n")  # ADCIRC Version 55
        out.write("1.0\n")
        # One record per node: 1-based node index, value
        for idx, v in enumerate(val, start=1):
            out.write(str(idx) + ' ' + str(v) + '\n')
    return None
def refresh(remote, local):
    """
    Downloads remote file (using urllib), overwrites local copy if
    it exists; falls back to a filesystem copy when the download fails.

    Args:
        remote (str): source URL or path
        local  (str): destination path
    """
    if not os.path.exists(local):
        msg('info', 'downloading ' + remote + ' as ' + local)
    else:
        msg('info', 'overwriting ' + local + ' file with ' + remote)
    try:
        urllib.request.urlretrieve(remote, local)
    except (OSError, ValueError):   # URLError subclasses OSError
        msg('warn', 'file ' + remote + ' was not downloaded. trying to cp...')
        # Fix: os.system() never raises, so the old try/except around it
        # was dead code — inspect the exit status instead.
        if os.system('cp ' + remote + ' ' + local) != 0:
            msg('warn', 'file ' + remote + ' could not be copied')
def download(remote, local):
    """
    Downloads remote file (using urllib) if it does not exist locally;
    falls back to a filesystem copy when the download fails.

    Args:
        remote (str): source URL or path
        local  (str): destination path
    """
    if not os.path.exists(local):
        msg('info', 'Downloading ' + remote + ' as ' + local)
        try:
            urllib.request.urlretrieve(remote, local)
        except (OSError, ValueError):   # URLError subclasses OSError
            msg('warn', 'file ' + remote + ' was not downloaded. trying to cp...')
            # Fix: os.system() never raises, so the old try/except around
            # it was dead code — inspect the exit status instead.
            if os.system('cp ' + remote + ' ' + local) != 0:
                msg('warn', 'file ' + remote + ' could not be copied')
    else:
        msg('warn', 'file ' + local + ' exists, skipping.')
def setDir (path, flush=False):
    """
    Creates (or flushes) directories.

    Args:
        path  (str) : directory to create
        flush (bool): when True and path exists, remove it and recreate
                      it empty
    """
    if not os.path.exists(path):
        msg('w', 'Path='+path+' does not exist. Trying to mkdir.')
        try:
            os.makedirs(path)
        except OSError:   # narrowed from bare except: filesystem errors
            msg ('e', 'Cannot make path=' + path)
    elif flush:
        msg('w', 'Path='+path+' will be flushed.')
        shutil.rmtree(path)
        setDir (path)   # recurse once to recreate the empty directory
def upload(localFile, userHost, remoteFolder):
    """
    Copies localFile to userHost:remoteFolder via scp and logs the
    outcome of the command.
    """
    cmd = 'scp -q ' + localFile + ' ' + userHost + ':' + remoteFolder
    status = os.system(cmd)
    if status == 0:
        msg('info', 'executed ' + cmd)
    else:
        msg('error', 'failed to execute ' + cmd)
def pointValidation (cfg, path, tag):
    '''
    Works on point data: validates modeled time series against NOAA
    CO-OPS station observations, station by station, and collects
    per-station skill metrics.

    Args:
        cfg  (dict): parsed configuration structure
        path (str) : model output directory
        tag  (str) : experiment tag; may be updated with the OFS cycle
    Returns:
        (pointSkill, datespan, tag):
            pointSkill (list of dict): per-station 'id', 'info', 'metrics'
            datespan   (list)        : [start, end] analysis datetimes
            tag        (str)         : possibly updated experiment tag
    '''
    pointSkill = []
    tmpDir = cfg['Analysis']['tmpdir']
    diagVar = cfg['Analysis']['name']
    # Choose the model output file
    fmask = cfg[diagVar]['pointfilemask']
    outputFile, tag = selectOutputFile (cfg, path, tag, fmask)
    # Read list of stations out of model file
    model = csdllib.models.adcirc.readTimeSeries (outputFile)
    stations = model['stations']
    if len(stations) == 0:
        msg('w','No stations found')
    # Set/get bbox
    lonMin = float( cfg['Analysis']['lonmin'])
    lonMax = float( cfg['Analysis']['lonmax'])
    latMin = float( cfg['Analysis']['latmin'])
    latMax = float( cfg['Analysis']['latmax'])
    # Set/get datespan: defaults to the full model time range, each end
    # optionally overridden by pointdatesstart/pointdatesend in cfg.
    dates = model['time']
    datespan = [dates[0], dates[-1]]
    try:
        datespan[0] = stampToTime (cfg[diagVar].get('pointdatesstart'))
    except:
        pass  # keep default start on any lookup/parse failure
    try:
        datespan[1] = stampToTime (cfg[diagVar].get('pointdatesend'))
    except:
        pass  # keep default end on any lookup/parse failure
    msg ( 'i','Datespan for analysis is set to: ' \
        + timeToStamp(datespan[0]) + ' ' + timeToStamp(datespan[1]) )
    for n in range(len(stations)):
        msg('i', 'Working on station : ' + str(n).zfill(5) + ' '
            + stations[n].strip())
        myPointData = dict ()
        isVirtual = False  # 'virtual' station has no obs counterpart
        forecast = model['zeta'][:,n]
        forecast[np.where(forecast<-100.)] = np.nan # _fillvalue doesnt work
        # Try to obtain NOS ID
        nosid = csdllib.data.coops.getNOSID ( stations[n].strip() )
        if nosid is None:
            isVirtual = True
            # add attempts to get UH or GLOSS ids here
        else:
            # Try to get stations' info, save locally as info.nos.XXXXXXX.dat
            localFile = os.path.join( cfg['Analysis']['localdatadir'],
                'info.nos.'+nosid+'.dat')
            if not os.path.exists(localFile):
                info = csdllib.data.coops.getStationInfo (nosid,
                    verbose=1, tmpDir=tmpDir)
                if info is None:
                    msg('w','No info found for station ' + nosid)
                    isVirtual = True
                else:
                    csdllib.data.coops.writeStationInfo (info, localFile)
            else:
                info = csdllib.data.coops.readStationInfo (localFile)
        if isVirtual:
            # Fabricate a minimal info record for non-NOAA points
            info = dict()
            info['nosid'] = 'UN'+str(n).zfill(5)
            info['lon'] = model['lon'][n]
            info['lat'] = model['lat'][n]
            info['name'] = model['stations'][n]
            info['state'] = 'UN'
            msg('w','Station is not NOAA gauge. Using id=' + info['nosid'])
        # Check lon/lats: fold longitudes below -180 back into range
        if info['lon'] < -180:
            info['lon'] = 360.+info['lon']
        print(str(info['lon']))
        # Only stations inside the analysis bbox are processed
        if lonMin <= info['lon'] and info['lon'] <= lonMax and \
           latMin <= info['lat'] and info['lat'] <= latMax:
            # plot time series
            if cfg['Analysis']['pointdataplots']:
                if not isVirtual:
                    # Get station's water levels for this timespan, save locally
                    localFile = os.path.join(
                        cfg['Analysis']['localdatadir'],
                        'cwl.nos.' + nosid + '.' + \
                        timeToStamp(datespan[0]) + '-' + \
                        timeToStamp(datespan[1]) + '.dat')
                    if not os.path.exists(localFile):
                        obs = csdllib.data.coops.getData(nosid,
                            datespan, tmpDir=tmpDir)
                        csdllib.data.coops.writeData (obs, localFile)
                    else:
                        obs = csdllib.data.coops.readData ( localFile )
                    refDates = np.nan
                    obsVals = np.nan
                    modVals = np.nan
                    if len(obs['values']) == 0:
                        msg('w','No obs found for station ' + nosid
                            + ', skipping.')
                        isVirtual = True
                    elif len(forecast) == 0 or np.sum(~np.isnan(forecast)) == 0:
                        msg('w','No forecast found for station ' + nosid
                            + ', skipping.')
                    else:
                        # Unify model and data series on a common clock
                        refDates, obsVals, modVals = \
                            csdllib.methods.interp.retime ( \
                            obs ['dates'], obs['values'], \
                            model['time'], forecast, refStepMinutes=6)
                        # Compute statistics
                        M = csdllib.methods.statistics.metrics (obsVals,
                            modVals, refDates)
                        myPointData['id'] = nosid
                        myPointData['info'] = info
                        myPointData['metrics'] = M
                        pointSkill.append ( myPointData )
                        try:
                            plt.waterlevel.pointSeries(cfg, obsVals,
                                modVals, refDates, nosid, info, tag,
                                model['time'], forecast)
                        except:
                            # plotting failure demotes the station to virtual
                            isVirtual = True
                            pass
                if isVirtual:
                    # Compute statistics (all-NaN placeholder metrics)
                    M = csdllib.methods.statistics.metrics (np.nan,
                        np.nan, np.nan)
                    myPointData['id'] = nosid
                    myPointData['info'] = info
                    myPointData['metrics'] = M
                    pointSkill.append ( myPointData )
                    try:
                        plt.waterlevel.pointSeries(cfg, None, forecast,
                            model['time'], info['nosid'], info, tag,
                            model['time'], forecast)
                    except:
                        # NOTE(review): nosid may be None here for stations
                        # that were virtual from the start, which would make
                        # this concatenation raise — verify
                        msg('w','Virtual station ' + nosid
                            + ' was not plotted.')
                        pass
            # plot station map
            if cfg['Analysis']['pointlocationmap']:
                plt.waterlevel.stationMap(cfg, info['nosid'], info, tag)
            # Plot dashpanels
            # NOTE(review): M and refDates may be undefined here when
            # pointdataplots is disabled — confirm expected configuration
            if cfg['Analysis']['pointskillpanel'] and not isVirtual:
                plt.skill.panel(cfg, M, refDates, nosid, info, tag)
        else:
            msg('i','Station ' + info['nosid']
                + ' is not within the domain. Skipping')
    #
    # # # Done running on stations list
    return pointSkill, datespan, tag
def fieldValidation (cfg, path, tag, grid):
    '''
    Works on field (gridded) data: plots maximal-value maps for the
    full domain and up to four zoom levels, and optionally assembles
    an animated GIF of the field evolution with ImageMagick.

    Args:
        cfg  (dict): parsed configuration structure
        path (str) : model output directory
        tag  (str) : experiment tag; may be updated with the OFS cycle
        grid (dict): ADCIRC grid structure (as from readGrid)
    Returns:
        (fieldVal, grid, tag): fieldVal is currently always an empty list
    '''
    imgDir = os.path.join( cfg['Analysis']['reportdir'],
                           cfg['Analysis']['imgdir'])
    tmpDir = cfg['Analysis']['tmpdir']
    fieldVal = []
    diagVar = cfg['Analysis']['name']
    print (diagVar)
    # Choose the model output file
    if cfg['Analysis']['fielddataplots'] == 1:
        # Plot maxele: time-maximum computed from the full surface field
        fmask = cfg[diagVar]['fieldfilemask']
        fieldFile, tag = selectOutputFile (cfg, path, tag, fmask)
        model = csdllib.models.adcirc.readSurfaceField (fieldFile,
            cfg[diagVar]['fieldfilevariable'])
        maxele = csdllib.models.adcirc.computeMax (model['value'])
        lons = model['lon']
        print('maxele lonlim = ' + str(np.min(lons)) + ' '
            + str(np.max(lons)))
        clim = [ float(cfg[diagVar]['maxfieldymin']),
                 float(cfg[diagVar]['maxfieldymax']) ]
        if diagVar.lower() == 'waterlevel':
            plt.field.map (cfg, grid, maxele, clim, tag,
                'Maximal Elevation')
            figFile = os.path.join(imgDir, 'map.max.png')
        if diagVar.lower() == 'windvelocity':
            maxele = 1.94384*maxele # mps to knots
            plt.field.map (cfg, grid, maxele, clim, tag,
                'Maximal Wind Velocity')
            figFile = os.path.join(imgDir, 'map.maxwvel.png')
        # NOTE(review): figFile is undefined if diagVar is neither
        # 'waterlevel' nor 'windvelocity' — confirm supported variables
        plt.field.save (figFile)
    if cfg['Analysis']['maxfieldplots'] == 1:
        # Plot the model's own precomputed max field (e.g. maxele output)
        fmask = cfg[diagVar]['maxfieldfilemask']
        fieldFile, tag = selectOutputFile (cfg, path, tag, fmask)
        model = csdllib.models.adcirc.readSurfaceField (fieldFile,
            cfg[diagVar]['maxfieldvariable'])
        maxele = model['value']
        lons = model['lon']
        clim = [ float(cfg[diagVar]['maxfieldymin']),
                 float(cfg[diagVar]['maxfieldymax']) ]
        if diagVar.lower() == 'waterlevel':
            plt.field.map (cfg, grid, maxele, clim, tag,
                'Maximal Elevation')
            figFile = os.path.join(imgDir, 'map.max.png')
        if diagVar.lower() == 'windvelocity':
            maxele = 1.94384*maxele # mps to knots
            plt.field.map (cfg, grid, maxele, clim, tag,
                'Maximal Wind Velocity')
            figFile = os.path.join(imgDir, 'map.maxwvel.png')
        plt.field.save (figFile)
    #Zoom levels, 1 to 4
    if cfg['Analysis']['maxfieldplots'] == 1 or \
       cfg['Analysis']['fielddataplots'] == 1:
        for zoom in range(1,5):
            print('Working on zoom ' + str(zoom))
            try:
                iniFile = cfg['Zoom'+str(zoom)]['domainfile']
                lonlim, latlim = csdllib.plot.map.ini(iniFile,
                    local=os.path.join(tmpDir, 'mapfile.ini'))
                # Deep copy so zoom limits do not leak into the shared cfg
                cfgzoom = copy.deepcopy(cfg)
                cfgzoom['Analysis']['lonmin'] = lonlim[0]
                cfgzoom['Analysis']['lonmax'] = lonlim[1]
                cfgzoom['Analysis']['latmin'] = latlim[0]
                cfgzoom['Analysis']['latmax'] = latlim[1]
                #figFile = os.path.join(imgDir, tag+'.map.max.'+ str(zoom)+'.png')
                if diagVar.lower() == 'waterlevel':
                    plt.field.map (cfgzoom, grid, maxele, clim, tag,
                        'Maximal Elevation', fig_w=5.0)
                    figFile = os.path.join(imgDir,
                        'map.max.'+ str(zoom)+'.png')
                if diagVar.lower() == 'windvelocity':
                    plt.field.map (cfgzoom, grid, maxele, clim, tag,
                        'Maximal Wind Velocity', fig_w=5.0)
                    figFile = os.path.join(imgDir,
                        'map.maxwvel.'+ str(zoom)+'.png')
                plt.field.save (figFile)
            except:
                pass  # best effort: missing Zoom section skips that level
    if cfg['Analysis']['fieldevolution']:
        # Do the movie: one frame per timestep, assembled into a GIF
        # NOTE(review): 'model' is undefined here if neither plotting
        # branch above ran — confirm the expected configuration
        if os.system('which convert') == 0:
            clim = [ float(cfg[diagVar]['fieldymin']),
                     float(cfg[diagVar]['fieldymax']) ]
            for n in range(len(model['time'])):
                msg('i','Working on ' + str(n))
                multi_plot(cfg, tag, grid, model, clim, n)
            gifFile = os.path.join( imgDir, tag + '.gif')
            cmd = "convert -delay 20 -loop 1 " + \
                os.path.join(cfg['Analysis']['tmpdir'],
                             tag+'*.mov*.png') + \
                " " + gifFile
            os.system(cmd)
        else:
            msg('e','You need Convert installed on your system.')
    return fieldVal, grid, tag
def readGrid ( gridFile, verbose=1):
    """
    Reads ADCIRC grid file
    Args:
        gridFile (str): full path to fort.14 file
    Returns:
        grid (dict): field names according to ADCIRC internal variables:
        http://adcirc.org/home/documentation/users-manual-v50/
        input-file-descriptions/adcirc-grid-and-boundary-information-file-fort-14/
        Returns None when the file does not exist.
    """
    if verbose:
        msg( 'info', 'Reading the grid from ' + gridFile)
    if not os.path.exists (gridFile):
        msg( 'error', 'File ' + gridFile + ' does not exist.')
        return
    f = open(gridFile)
    # Header: description line, then NE (elements) and NP (points)
    myDesc = f.readline().rstrip()
    myNE, myNP = map(int, f.readline().split())
    if verbose:
        msg( 'i','Grid description ' + myDesc + '.')
        msg( 'i','Grid size: NE= ' + str(myNE) + ', NP=' + str(myNP) + '.')
    myPoints = np.zeros([myNP,3], dtype=float)
    myElements = np.zeros([myNE,3], dtype=int)
    if verbose:
        msg( 'i','Reading grid points...')
    # Node table: index, lon, lat, depth (index column is ignored)
    for k in range(myNP):
        line = f.readline().split()
        myPoints[k,0] = float(line[1])   # lon
        myPoints[k,1] = float(line[2])   # lat
        myPoints[k,2] = float(line[3])   # depth
    if verbose:
        msg( 'i','Reading grid elements...')
    # Element table: index, node count, then three node numbers
    for k in range(myNE):
        line = f.readline().split()
        #myElements[k,0:2] = map(int, line[2:4])
        myElements[k,0] = int (line[2])
        myElements[k,1] = int (line[3])
        myElements[k,2] = int (line[4])
    # Open (elevation-specified) boundary counts
    myNOPE = int(f.readline().split()[0])   # number of open boundaries
    myNETA = int(f.readline().split()[0])   # total open-boundary nodes
    myNVDLL = np.zeros([myNOPE], dtype=int)
    myNBDV = np.zeros([myNOPE, myNETA], dtype=int)
    if verbose:
        msg('i', 'Reading elevation-specified boundaries...')
    for k in range(myNOPE):
        myNVDLL [k] = int(f.readline().split()[0])  # nodes in boundary k
        for j in range(myNVDLL[k]):
            myNBDV[k,j] = int(f.readline().strip())
    # Normal flow (land/barrier) boundary counts
    myNBOU = int(f.readline().split()[0])   # number of flow boundaries
    myNVEL = int(f.readline().split()[0])   # total flow-boundary nodes
    myNVELL = np.zeros([myNBOU], dtype=int)
    myIBTYPE = np.zeros([myNBOU], dtype=int)
    myNBVV = np.zeros([myNBOU, myNVEL], dtype=int)
    myBARLANHT = np.zeros([myNBOU, myNVEL], dtype=float)
    myBARLANCFSP = np.zeros([myNBOU, myNVEL], dtype=float)
    myIBCONN = np.zeros([myNBOU, myNVEL], dtype=int)
    myBARINHT = np.zeros([myNBOU, myNVEL], dtype=float)
    myBARINCFSB = np.zeros([myNBOU, myNVEL], dtype=float)
    myBARINCFSP = np.zeros([myNBOU, myNVEL], dtype=float)
    myPIPEHT = np.zeros([myNBOU, myNVEL], dtype=float)
    myPIPECOEF = np.zeros([myNBOU, myNVEL], dtype=float)
    myPIPEDIAM = np.zeros([myNBOU, myNVEL], dtype=float)
    if verbose:
        msg('i', 'Reading normal flow-specified boundaries...')
    for k in range(myNBOU):
        line = f.readline().split()
        myNVELL[k] = int(line[0])    # nodes in boundary k
        myIBTYPE[k] = int(line[1])   # IBTYPE controls per-node columns
        for j in range(myNVELL[k]):
            line = f.readline().rstrip().split()
            if myIBTYPE[k] in [0,1,2,10,11,12,20,21,22,30]:
                # plain land/island boundary: node number only
                myNBVV [k,j] = int(line[0])
            elif myIBTYPE[k] in [3,13,23]:
                # external barrier: node, height, supercritical coef
                myNBVV [k,j] = int (line[0])
                myBARLANHT [k,j] = float(line[1])
                myBARLANCFSP[k,j] = float(line[2])
            elif myIBTYPE[k] in [4,24]:
                # internal barrier: node, paired node, height, coefs
                myNBVV [k,j] = int (line[0])
                myIBCONN [k,j] = int (line[1])
                myBARINHT [k,j] = float(line[2])
                myBARINCFSB [k,j] = float(line[3])
                myBARINCFSP [k,j] = float(line[4])
            elif myIBTYPE[k] in [5,25]:
                # internal barrier with cross-barrier pipes
                myNBVV [k,j] = int (line[0])
                myIBCONN [k,j] = int (line[1])
                myBARINHT [k,j] = float(line[2])
                myBARINCFSB [k,j] = float(line[3])
                myBARINCFSP [k,j] = float(line[4])
                myPIPEHT [k,j] = float(line[5])
                myPIPECOEF [k,j] = float(line[6])
                myPIPEDIAM [k,j] = float(line[7])
    f.close()
    return {'GridDescription' : myDesc,
            'NE' : myNE, 'NP' : myNP,
            'lon' : np.squeeze(myPoints[:,0]),
            'lat' : np.squeeze(myPoints[:,1]),
            'depth' : np.squeeze(myPoints[:,2]),
            'Elements' : np.squeeze(myElements),
            'NETA' : myNETA, 'NOPE' : myNOPE,
            'ElevationBoundaries' : np.squeeze(myNBDV),
            'NormalFlowBoundaries' : np.squeeze(myNBVV),
            'ExternalBarrierHeights' : np.squeeze(myBARLANHT),
            'ExternalBarrierCFSPs' : np.squeeze(myBARLANCFSP),
            'BackFaceNodeNormalFlow' : np.squeeze(myIBCONN),
            'InternalBarrierHeights' : np.squeeze(myBARINHT),
            'InternallBarrierCFSPs' : np.squeeze(myBARINCFSP),
            'InternallBarrierCFSBs' : np.squeeze(myBARINCFSB),
            'CrossBarrierPipeHeights' : np.squeeze(myPIPEHT),
            'BulkPipeFrictionFactors' : np.squeeze(myPIPECOEF),
            'CrossBarrierPipeDiameter' : np.squeeze(myPIPEDIAM) }
msg('i','Domain limits are '+ str(lonMin) + ', ' + str(lonMax) + ', ' + str(latMin) + ', ' + str(latMax)) cfg['Analysis']['lonmin'] = lonMin cfg['Analysis']['lonmax'] = lonMax cfg['Analysis']['latmin'] = latMin cfg['Analysis']['latmax'] = latMax return cfg #============================================================================== if __name__ == "__main__": ''' Generic Validation Driver ''' msg('time', str(datetime.datetime.utcnow()) + ' UTC') cmd = read_cmd_argv (sys.argv[1:]) # Read command line aruments cfg = csdllib.oper.sys.config (cmd.iniFile) # Read config file cfg = setDomainLimits(cfg) # Set domain limits cycle = '' # OFS cycle try: cycle = cfg['Forecast']['cycle'] except: pass # Set up validation execution paths, flush tmp directory workDir = cfg['Analysis']['workdir'] dataDir = cfg['Analysis']['localdatadir'] tmpDir = cfg['Analysis']['tmpdir'] wwwDir = cfg['Analysis']['reportdir'] imgDir = os.path.join(wwwDir, cfg['Analysis']['imgdir']) setDir (workDir)
def add(ax):
    """
    Adds data points to the axis.

    Currently a stub: logs a not-implemented error and returns the
    axis unchanged.
    """
    msg('e', 'function is not yet implemented')
    return ax
def readFort15 ( fort15file, verbose=1 ):
    """
    Reads ADCIRC fort.15 file according to:
    http://adcirc.org/home/documentation/users-manual-v50/
    input-file-descriptions/
    model-parameter-and-periodic-boundary-condition-file-fort-15/

    Args:
        fort15file (str): full path to the fort.15 file
    Returns:
        dict with 'config' (run parameters), 'tides' (tidal
        constituents), 'stations' (elevation output stations) and
        'coldstart' (last line of the file, stripped).
    """
    config = dict()
    tides = dict()
    if verbose:
        msg( 'i','Reading fort.15 file ' + fort15file)
    f = open(fort15file,'r')
    # Run-control header: one parameter per line, first token only
    config['mesh'] = f.readline().strip()
    config['description'] = f.readline().strip()
    config['NFOVER'] = int(f.readline().split()[0])
    config['NABOUT'] = int(f.readline().split()[0])
    config['NSCREEN'] = int(f.readline().split()[0])
    config['IHOT'] = int(f.readline().split()[0])
    config['ICS'] = int(f.readline().split()[0])
    config['IM'] = int(f.readline().split()[0])
    config['NOLIBF'] = int(f.readline().split()[0])
    config['NOLIFA'] = int(f.readline().split()[0])
    config['NOLICA'] = int(f.readline().split()[0])
    config['NOLICAT'] = int(f.readline().split()[0])
    config['NWP'] = int(f.readline().split()[0])
    # NWP nodal attribute names follow, one per line
    config['node attrib'] = []
    for n in range(config['NWP']):
        config['node attrib'].append(f.readline().strip())
    config['NCOR'] = int(f.readline().split()[0])
    config['NTIP'] = int(f.readline().split()[0])
    config['NWS'] = int(f.readline().split()[0])
    config['NRAMP'] = int(f.readline().split()[0])
    config['G'] = float(f.readline().split()[0])
    config['TAU0'] = int(f.readline().split()[0])
    config['DT'] = float(f.readline().split()[0])
    config['STATIM'] = float(f.readline().split()[0])
    config['REFTIM'] = float(f.readline().split()[0])
    # WTIMINC line: meteorological forcing start date and parameters
    # NOTE(review): this layout assumes a specific NWS value — verify
    line = f.readline().split()
    config['WTIMINC_Year'] = int(line[0])
    config['WTIMINC_Month'] = int(line[1])
    config['WTIMINC_Day'] = int(line[2])
    config['WTIMINC_Param1'] = int(line[3])
    config['WTIMINC_Param2'] = float(line[4])
    config['WTIMINC_Param3'] = float(line[5])
    config['RNDAY'] = float(f.readline().split()[0])
    config['DRAMP'] = float(f.readline().split()[0])
    # GWCE weighting factors (three values on one line)
    line = f.readline().split()
    config['TWF_GWCE_Param1'] = float(line[0])
    config['TWF_GWCE_Param2'] = float(line[1])
    config['TWF_GWCE_Param3'] = float(line[2])
    # Wetting/drying parameters
    line = f.readline().split()
    config['H0'] = float(line[0])
    config['NODEDRYMIN'] = float(line[1])
    config['NODEWETRMP'] = float(line[2])
    config['VELMIN'] = float(line[3])
    # CPP projection center
    line = f.readline().split()
    config['SLAM0'] = float(line[0])
    config['SFEA0'] = float(line[1])
    config['FFACTOR'] = float(f.readline().split()[0])
    config['ESL'] = float(f.readline().split()[0])
    config['CORI'] = float(f.readline().split()[0])
    # Tidal potential constituents: NTIF pairs of (name line, data line)
    config['NTIF'] = int(f.readline().split()[0])
    tides['TIPOTAG_name'] = []
    tides['TPK'] = []
    tides['AMIGT'] = []
    tides['ETRF'] = []
    tides['FFT'] = []
    tides['FACET'] = []
    for n in range(config['NTIF']):
        tides['TIPOTAG_name'].append(f.readline().strip())
        line = f.readline().split()
        tides['TPK'].append(float(line[0]))
        tides['AMIGT'].append(float(line[1]))
        tides['ETRF'].append(float(line[2]))
        tides['FFT'].append(float(line[3]))
        tides['FACET'].append(float(line[4]))
    # Boundary forcing constituents: NBFR pairs of (name line, data line)
    config['NBFR'] = int(f.readline().split()[0])
    tides['BOUNDTAG_name'] = []
    tides['AMIG'] = []
    tides['FF'] = []
    tides['FACE'] = []
    for n in range(config['NBFR']):
        tides['BOUNDTAG_name'].append(f.readline().strip())
        line = f.readline().split()
        tides['AMIG'].append(float(line[0]))
        tides['FF'].append(float(line[1]))
        tides['FACE'].append(float(line[2]))
    config['NETA'] = 0 # is undefined thank you very much!
    # Finding NETA by scanning the first constituent's forcing block:
    # per-node amplitude/phase lines have exactly two tokens, so count
    # them until a different line (the next constituent tag) appears.
    f.readline() # k1 (first constituent tag)
    count = 0
    stopReading = False
    while not stopReading:
        line = f.readline().split()
        if len(line)==2:
            count += 1
        else:
            #o1: hit the next tag; count is NETA, so skip its node block
            config['NETA'] = count
            for m in range(config['NETA']):
                #o1
                f.readline()
            stopReading = True
    # Skip the remaining constituents: tag line + NETA node lines each
    # NOTE(review): NBFR-2 assumes two constituents were consumed by the
    # scan above — verify against a real fort.15
    for n in range(config['NBFR']-2):
        #p1
        f.readline()
        for m in range(config['NETA']):
            f.readline()
    config['ANGINN'] = float(f.readline().split()[0])
    # Elevation recording stations block header
    line = f.readline().split()
    config['NOUTE'] = float(line[0])
    config['TOUTSE'] = float(line[1])
    config['TOUTFE'] = float(line[2])
    config['NSPOOLE'] = int(line[3])
    stations = readStationsList (f)
    config['NSTATIONS'] = len(stations['lon'])
    # Remaining lines are not parsed; the last one is the coldstart date
    linelist = f.readlines()
    coldstart = linelist[len(linelist)-1].strip()
    f.close()
    return {'config' : config,
            'tides' : tides,
            'stations' : stations,
            'coldstart' : coldstart }
def readOffset63 ( offset63file):
    """
    Placeholder reader for ADCIRC offset.63 files.

    Currently a stub: logs a not-implemented error and returns None.
    """
    msg( 'e','readOffset63() is not yet implemented.')
    return None
def set():
    """
    Sets up scatter plot axis.

    Currently a stub: logs a not-implemented error and returns None.
    """
    msg('e', 'function is not yet implemented')
    # Fix: the original returned the undefined name 'ax', which raised
    # NameError on every call; return None until implemented.
    return None