def get_distance_between_pts(pos1, pos2):
    """Calculate the distance over ground between two points.

    Both points are converted to North Carolina state plane meters before the
    distance is calculated.

    Args:
        pos1 (tuple): assumed to be a longitude/latitude pair of length two
        pos2 (tuple): assumed to be a longitude/latitude pair of length two

    Returns:
        Euclidean distance between the two points (m)
    """
    out1 = gp.FRFcoord(pos1[0], pos1[1])
    out2 = gp.FRFcoord(pos2[0], pos2[1])
    return np.sqrt((out1['StateplaneE'] - out2['StateplaneE'])**2
                   + (out1['StateplaneN'] - out2['StateplaneN'])**2)
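# Usage sketch (not part of the original module): the lon/lat pairs below are
# illustrative values near the FRF property, chosen only to show the call
# signature; they are not surveyed positions.
def _example_get_distance():
    pt1 = (-75.7467, 36.1833)  # hypothetical lon/lat
    pt2 = (-75.7519, 36.1840)  # hypothetical lon/lat
    return get_distance_between_pts(pt1, pt2)  # distance in meters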
def getClaris(self):
    """Operates on Claris files, which are not yet on THREDDS and may not be
    saved in the format/fashion this script is designed to handle.

    Currently operates on a .mat file using h5py. A transformation is applied
    to get everything to NC state plane and then to FRF coordinates with
    testbedutils.geoprocess, BUT only data between yFRF = -100 and 1400 are
    kept.
    """
    if self.clarisFile is None:
        return None
    with h5py.File(self.clarisFile, 'r') as f:
        for k in f.keys():
            print(k)
        x = f['grid/x'][:]
        y = f['grid/y'][:]
        z = f['grid/z'][:]
    # rotate the Claris grid into NC state plane, then translate by the fixed
    # state plane offsets
    rot = np.array([[0.933218541975915, -0.359309271954326],
                    [0.359309271954326, 0.933218541975915]])
    points = np.vstack([x.flatten(), y.flatten()])  # , z.flatten()])
    rotated = np.matmul(rot, points).T
    NCx = rotated[:, 0] + 9.030235779999999e+05
    NCy = rotated[:, 1] + 2.710970920000000e+05
    FRF = geoprocess.FRFcoord(NCx, NCy)
    x_claris = FRF['xFRF']
    y_claris = FRF['yFRF']
    z_claris = z.flatten()
    del x, y, z
    indomain = np.where(np.logical_and(y_claris > -100, y_claris < 1400))
    y_claris = y_claris[indomain]
    x_claris = x_claris[indomain]
    z_claris = z_claris[indomain]
    output = dict()
    output['x'] = x_claris
    output['y'] = y_claris
    output['z'] = z_claris
    return output
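# Sketch (an assumption about intent, not from the original source): the
# hard-coded matrix above is an exact 2-D rotation, R = [[cos a, -sin a],
# [sin a, cos a]], with a = arccos(0.933218541975915), roughly 21.06 degrees
# (sin a matches 0.359309271954326). Reconstructing it from the angle makes
# the geometry explicit.
def _claris_rotation_matrix():
    angle = np.arccos(0.933218541975915)  # ~0.3675 rad
    return np.array([[np.cos(angle), -np.sin(angle)],
                     [np.sin(angle), np.cos(angle)]])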
def makenc_CSHORErun(ofname, dataDict, globalYaml, varYaml):
    """Makes netCDF files from CSHORE model runs created by David Young using
    all the stuff Spicer Bak used. You have to build dataDict from the
    different dictionaries output by cshore_io.load_CSHORE_results().
    YOU DON'T HAVE TO HAND IT LAT/LON THOUGH!!!

    Args:
        ofname (str): the FULL PATH, INCLUDING FILENAME AND EXTENSION, to the
            position where the ncFile will be saved on output
        dataDict (dict): keys:
            time: time steps of the simulation nc file
            xFRF: xFRF positions of the simulation
            aveE: depth-averaged eastward current
            stdE: standard deviation of the eastward current
            aveN: same as above but for the northward current
            stdN: standard deviation of the northward current
            waveHs: significant wave heights
            waveMeanDirection: mean direction of the waves at each cross-shore position
            waterLevel: mean water level at each cross-shore position
            stdWaterLevel: standard deviation of the water surface elevation at each
                cross-shore position
            setup: wave setup at each cross-shore position
            runup2perc: 2 percent exceedance runup elevation for each model time step
            runupMean: mean runup elevation for each model time step
            qbx: cross-shore bed load sediment transport rate
            qsx: cross-shore suspended sediment transport rate
            qby: alongshore bed load sediment transport rate
            qsy: alongshore suspended sediment transport rate
            probabilitySuspension: probability that sediment will be suspended at a
                particular node
            probabilityMovement: probability that sediment will move
            suspendedSedVolume: suspended sediment volume at each cross-shore position
            bottomElevation: the bottom elevation at each xFRF position in the simulation
            surveyNumber: the survey number that the integrated bathymetry for this
                simulation was built on
            profileNumber: either the profile number of the survey or the alongshore
                position of the integrated bathymetry transect used as the bed elevation
                boundary condition
            bathymetryDate: the day the aforementioned survey was taken
            yFRF: the yFRF position of the transect itself; if it is the integrated
                bathymetry, this will be identical to profileNumber
        globalYaml (str): full path to the globalYaml used to build this ncFile
        varYaml (str): full path to the variableYaml used to build this ncFile

    Returns:
        netCDF file with CSHORE model results in it
    """
    from testbedutils import geoprocess as gp  # this might create a circular import
    globalAtts = import_template_file(globalYaml)
    varAtts = import_template_file(varYaml)
    # create netCDF file
    fid = init_nc_file(ofname, globalAtts)
    # note: you have to hand this the yFRF coordinates of the BC gage if you want to get lat/lon
    lx = np.size(dataDict['xFRF'], axis=0)
    lat = np.zeros(lx)
    lon = np.zeros(lx)
    for ii in range(0, lx):
        coords = gp.FRFcoord(dataDict['xFRF'][ii], dataDict['yFRF'])
        lat[ii] = coords['Lat']
        lon[ii] = coords['Lon']
    dataDict['latitude'] = lat
    dataDict['longitude'] = lon

    # we are HARD CODING the dimensions to ALWAYS end at the 8m ARRAY
    # (xFRF = 914.44, rounded DOWN to 914); missing values are filled with NaNs as required
    array8m_loc = 914
    # creating dimensions of data
    new_s = np.shape(range(-50, array8m_loc + 1))[0]
    new_t = np.shape(dataDict['waveHs'])[0]
    xFRF = fid.createDimension('xFRF', new_s)
    time = fid.createDimension('time', new_t)
    # check to see if the grid I am importing is smaller than my netCDF grid
    # (note: the original compared an int to a shape tuple, which was always False)
    if new_s == np.shape(dataDict['xFRF'])[0]:
        # the model grid is the same as the netCDF grid, so do nothing
        dataDict_n = dataDict
    else:
        dataDict_n = {
            'xFRF': np.flipud(np.array(range(-50, array8m_loc + 1)) + 0.0),
            'time': dataDict['time'],
            'aveE': np.full((new_t, new_s), fill_value=np.nan),
            'stdE': np.full((new_t, new_s), fill_value=np.nan),
            'aveN': np.full((new_t, new_s), fill_value=np.nan),
            'stdN': np.full((new_t, new_s), fill_value=np.nan),
            'waveHs': np.full((new_t, new_s), fill_value=np.nan),
            'waveMeanDirection': np.full((new_t, new_s), fill_value=np.nan),
            'waterLevel': np.full((new_t, new_s), fill_value=np.nan),
            'stdWaterLevel': np.full((new_t, new_s), fill_value=np.nan),
            'setup': np.full((new_t, new_s), fill_value=np.nan),
            'runup2perc': dataDict['runup2perc'],
            'runupMean': dataDict['runupMean'],
            'qbx': np.full((new_t, new_s), fill_value=np.nan),
            'qsx': np.full((new_t, new_s), fill_value=np.nan),
            'qby': np.full((new_t, new_s), fill_value=np.nan),
            'qsy': np.full((new_t, new_s), fill_value=np.nan),
            'probabilitySuspension': np.full((new_t, new_s), fill_value=np.nan),
            'probabilityMovement': np.full((new_t, new_s), fill_value=np.nan),
            'suspendedSedVolume': np.full((new_t, new_s), fill_value=np.nan),
            'bottomElevation': np.full((new_t, new_s), fill_value=np.nan),
            'latitude': np.full((new_s), fill_value=np.nan),
            'longitude': np.full((new_s), fill_value=np.nan),
            'surveyNumber': dataDict['surveyNumber'],
            'profileNumber': dataDict['profileNumber'],
            'bathymetryDate': dataDict['bathymetryDate'],
            'yFRF': dataDict['yFRF'],
        }
        if 'FIXED' in ofname:
            dataDict_n['bottomElevation'] = np.full((new_s), fill_value=np.nan)
        elif 'MOBILE' in ofname:
            dataDict_n['bottomElevation'] = np.full((new_t, new_s), fill_value=np.nan)
        else:
            print('You need to modify makenc_CSHORErun in makenc.py to accept your new version name!')
    # find indices of the dataDict grid end points on the padded netCDF grid
    min_x = min(dataDict['xFRF'])
    ind_minx = int(np.argwhere(dataDict_n['xFRF'] == min_x))
    max_x = max(dataDict['xFRF'])
    ind_maxx = int(np.argwhere(dataDict_n['xFRF'] == max_x))
    # copy each time step of the model output into the padded arrays
    timeVaryingKeys = ['aveE', 'stdE', 'aveN', 'stdN', 'waveHs',
                       'waveMeanDirection', 'waterLevel', 'stdWaterLevel',
                       'setup', 'qbx', 'qsx', 'qby', 'qsy',
                       'probabilitySuspension', 'probabilityMovement',
                       'suspendedSedVolume']
    for ii in range(0, int(new_t)):
        for key in timeVaryingKeys:
            dataDict_n[key][ii][ind_maxx:ind_minx + 1] = dataDict[key][ii]
    # latitude/longitude carry no time dimension, so they are copied once
    # (the original code indexed them with the time index ii inside the loop)
    dataDict_n['latitude'][ind_maxx:ind_minx + 1] = dataDict['latitude']
    dataDict_n['longitude'][ind_maxx:ind_minx + 1] = dataDict['longitude']
    if 'FIXED' in ofname:
        dataDict_n['bottomElevation'][ind_maxx:ind_minx + 1] = dataDict['bottomElevation']
    elif 'MOBILE' in ofname:
        for ii in range(0, int(new_t)):
            dataDict_n['bottomElevation'][ii][ind_maxx:ind_minx + 1] = dataDict['bottomElevation'][ii]
    else:
        print('You need to modify makenc_CSHORErun in makenc.py to accept your new version name!')
    # get rid of all masks: convert the NaN-filled arrays to masked arrays
    maskKeys = ['aveE', 'stdE', 'aveN', 'stdN', 'waveHs', 'waveMeanDirection',
                'waterLevel', 'stdWaterLevel', 'setup', 'qbx', 'qsx', 'qby',
                'qsy', 'probabilitySuspension', 'probabilityMovement',
                'suspendedSedVolume', 'latitude', 'longitude',
                'bottomElevation']
    for key in maskKeys:
        dataDict_n[key] = np.ma.masked_array(dataDict_n[key], np.isnan(dataDict_n[key]))

    # check to see if I screwed up!
    assert set(dataDict.keys()) == set(dataDict_n.keys()), \
        'You are missing dictionary keys in the new dictionary!'
    # replace the dictionary with the new dictionary
    dataDict = dataDict_n
    del dataDict_n

    # now flip everything that has a spatial dimension around so it will be all
    # pretty like Spicer wants
    flipSpatial2D = ['aveN', 'waveHs', 'aveE', 'waveMeanDirection',
                     'stdWaterLevel', 'probabilitySuspension', 'stdN', 'stdE',
                     'bottomElevation', 'qsy', 'qsx', 'waterLevel', 'qbx',
                     'qby', 'setup', 'suspendedSedVolume',
                     'probabilityMovement']
    for key in flipSpatial2D:
        dataDict[key] = np.flip(dataDict[key], 1)
    for key in ['xFRF', 'longitude', 'latitude']:
        dataDict[key] = np.flip(dataDict[key], 0)

    # write data to file
    write_data_to_nc(fid, varAtts, dataDict)
    # close file
    fid.close()
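# Usage sketch (hypothetical paths and sizes, not from the original source):
# minimal shape-correct inputs for makenc_CSHORErun. A real run would populate
# dataDict from cshore_io.load_CSHORE_results() as the docstring describes;
# the yaml paths and metadata values below are placeholders.
def _example_makenc_CSHORErun():
    nT, nX = 2, 965  # 2 time steps; xFRF spans -50..914 to match the 8m array grid
    dataDict = {'time': np.arange(nT),
                'xFRF': np.arange(914, -51, -1) + 0.0,
                'yFRF': 945,  # hypothetical BC gage transect
                'runup2perc': np.zeros(nT), 'runupMean': np.zeros(nT),
                'surveyNumber': 1100, 'profileNumber': 945,
                'bathymetryDate': 20150525}
    for key in ['aveE', 'stdE', 'aveN', 'stdN', 'waveHs', 'waveMeanDirection',
                'waterLevel', 'stdWaterLevel', 'setup', 'qbx', 'qsx', 'qby',
                'qsy', 'probabilitySuspension', 'probabilityMovement',
                'suspendedSedVolume', 'bottomElevation']:
        dataDict[key] = np.zeros((nT, nX))
    # 'MOBILE' in the file name selects the 2-D (time-varying) bottomElevation branch
    makenc_CSHORErun('CMTB_MOBILE_example.nc', dataDict,
                     'yamls/CSHOREglobal.yml', 'yamls/CSHOREvar.yml')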
def makenc_FRFGrid(gridDict, ofname, globalYaml, varYaml):
    """Makes netCDF files from the FRF natural neighbor tool created by Spicer
    Bak using the pyngl library. The transect dictionary is created using the
    natural neighbor tool in FRF_natneighbor.py.

    Args:
        gridDict: data dictionary matching varYaml; requires 'zgrid', 'ygrid',
            'xgrid', 'StateplaneE', 'StateplaneN', 'Lat', 'Lon', 'FRF_X', 'FRF_Y'
        ofname: the file name to be created
        globalYaml: global metadata yaml file name
        varYaml: variable metadata yaml file name

    Returns:
        netCDF file with gridded data in it
    """
    from testbedutils import geoprocess as gp  # this might be creating a circular import
    globalAtts = import_template_file(globalYaml)
    varAtts = import_template_file(varYaml)
    # create netCDF file
    fid = init_nc_file(ofname, globalAtts)
    # creating dimensions of data
    xShore = fid.createDimension('xShore', np.shape(gridDict['zgrid'])[0])
    yShore = fid.createDimension('yShore', np.shape(gridDict['zgrid'])[1])
    time = fid.createDimension('time', np.size(gridDict['time']))
    # creating lat/lon and state plane coords
    xx, yy = np.meshgrid(gridDict['xgrid'], gridDict['ygrid'])
    latGrid = np.zeros(np.shape(yy))
    lonGrid = np.zeros(np.shape(xx))
    statePlN = np.zeros(np.shape(yy))
    statePlE = np.zeros(np.shape(xx))
    for iy in range(0, np.size(gridDict['zgrid'], axis=1)):
        for ix in range(0, np.size(gridDict['zgrid'], axis=0)):
            coords = gp.FRFcoord(xx[iy, ix], yy[iy, ix])
            statePlE[iy, ix] = coords['StateplaneE']
            statePlN[iy, ix] = coords['StateplaneN']
            latGrid[iy, ix] = coords['Lat']
            lonGrid[iy, ix] = coords['Lon']
            assert xx[iy, ix] == coords['FRF_X']
            assert yy[iy, ix] == coords['FRF_Y']
    # put these data into the dictionary that matches the yaml
    gridDict['Latitude'] = latGrid[:, 0]
    gridDict['Longitude'] = lonGrid[0, :]
    gridDict['Easting'] = statePlE[:, 0]
    gridDict['Northing'] = statePlN[0, :]
    gridDict['FRF_Xshore'] = gridDict.pop('xgrid')
    gridDict['FRF_Yshore'] = gridDict.pop('ygrid')
    # adding a 3rd dimension (time) to the elevation grid
    a = gridDict.pop('zgrid').T
    gridDict['Elevation'] = np.full([1, a.shape[0], a.shape[1]], fill_value=[a], dtype=np.float32)
    # write data to file
    write_data_to_nc(fid, varAtts, gridDict)
    # close file
    fid.close()
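# Note (a sketch of equivalent intent, not from the original source): the
# np.full call above broadcasts the 2-D elevation grid into a singleton time
# dimension. A more direct construction of the same (1, ny, nx) array:
def _add_time_axis(a):
    return a[np.newaxis, :, :].astype(np.float32)  # shape (1, a.shape[0], a.shape[1])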
def CMSsimSetup(startTime, inputDict):
    """This function is the master call for data preparation for the Coastal
    Model Test Bed (CMTB) and the CMS wave/flow model.

    NOTE: input to the function is the end of the duration. All files are
    labeled by this convention; all other time stamps are the top of the data
    collection.

    Args:
        startTime (str): a string of format YYYY-mm-ddTHH:MM:SSZ (or
            YYYY-mm-dd) in UTC time
        inputDict (dict): a dictionary that is read from the yaml read function
    """
    # begin by setting up input parameters
    if 'simulationDuration' in inputDict:
        timerun = inputDict['simulationDuration']
    else:
        timerun = 24
    if 'pFlag' in inputDict:
        pFlag = inputDict['pFlag']
    else:
        pFlag = True
    assert 'version_prefix' in inputDict, 'Must have "version_prefix" in your input yaml'
    version_prefix = inputDict['version_prefix']
    if 'THREDDS' in inputDict:
        server = inputDict['THREDDS']
    else:
        print('Choosing CHL thredds by default, this may be slower!')
        server = 'CHL'
    TOD = 0  # hour of day simulation to start (UTC)
    path_prefix = inputDict['path_prefix']  # + "/%s/" % version_prefix  # data super directory
    # ______________________________________________________________________________
    # define version parameters
    versionlist = ['HP', 'UNTUNED']
    assert version_prefix in versionlist, 'Please check your version prefix'
    simFnameBackground = inputDict['gridSIM']  # e.g. '/home/spike/cmtb/gridsCMS/CMS-Wave-FRF.sim'
    backgroundDepFname = inputDict['gridDEP']  # e.g. '/home/spike/cmtb/gridsCMS/CMS-Wave-FRF.dep'
    CMSinterp = inputDict.get('CMSinterp', 50)  # max freq bins for the model
    fastModeOn = inputDict.get('fastMode', False)
    # do versioning stuff here
    if version_prefix == 'HP':
        full = False
    elif version_prefix == 'UNTUNED':
        full = False
    else:
        raise NotImplementedError('Check Version Prefix')
    # _______________________________________________________________________________
    # set times
    try:
        d1 = DT.datetime.strptime(startTime, '%Y-%m-%dT%H:%M:%SZ') + DT.timedelta(TOD / 24., 0, 0)
        d2 = d1 + DT.timedelta(0, timerun * 3600, 0)
        date_str = d1.strftime('%Y-%m-%dT%H%M%SZ')  # used to be endtime
    except ValueError:
        assert len(startTime) == 10, 'Your time does not fit convention, check T/Z and input format'
        d1 = DT.datetime.strptime(startTime, '%Y-%m-%d') + DT.timedelta(TOD / 24., 0, 0)
        d2 = d1 + DT.timedelta(0, timerun * 3600, 0)
        date_str = d1.strftime('%Y-%m-%d')  # used to be endtime
        assert int(timerun) >= 24, 'Running simulations with less than 24 hours of simulation ' \
                                   'time requires an end time format of type: %Y-%m-%dT%H:%M:%SZ'
    if type(timerun) == str:
        timerun = int(timerun)

    # __________________Make Directories_____________________________________________
    if not os.path.exists(path_prefix + date_str):  # if it doesn't exist
        os.makedirs(path_prefix + date_str)  # make the directory
    if not os.path.exists(path_prefix + date_str + "/figures/"):
        os.makedirs(path_prefix + date_str + "/figures/")

    print("Model Time Start : %s  Model Time End: %s" % (d1, d2))
    print("OPERATIONAL files will be placed in {0} folder".format(path_prefix + date_str))
    # ______________________________________________________________________________
    # begin model data gathering
    # _____________WAVES____________________________
    go = getObs(d1, d2, THREDDS=server)  # initialize get observation
    print('_________________\nGetting Wave Data')
    rawspec = go.getWaveSpec(gaugenumber=0)
    assert rawspec is not None, "\n++++\nThere's no wave data between %s and %s\n++++\n" % (d1, d2)

    prepdata = STPD.PrepDataTools()
    # rotate and lower the resolution of the directional wave spectra
    wavepacket = prepdata.prep_spec(rawspec, version_prefix, datestr=date_str, plot=pFlag,
                                    full=full, outputPath=path_prefix,
                                    CMSinterp=CMSinterp)  # freq bands are max for model
    print("number of wave records %d with %d interpolated points"
          % (np.shape(wavepacket['spec2d'])[0], wavepacket['flag'].sum()))

    # _____________WINDS______________________
    print('_________________\nGetting Wind Data')
    try:
        rawwind = go.getWind(gaugenumber=0)
        # average and rotate winds; wind height correction
        windpacket = prepdata.prep_wind(rawwind, wavepacket['epochtime'])
        print('number of wind records %d with %d interpolated points'
              % (np.size(windpacket['time']), sum(windpacket['flag'])))
    except (RuntimeError, TypeError):
        windpacket = None
        print(' NO WIND ON RECORD')

    # ___________WATER LEVEL__________________
    print('_________________\nGetting Water Level Data')
    try:
        rawWL = go.getWL()  # get water level data
        WLpacket = prepdata.prep_WL(rawWL, wavepacket['epochtime'])  # average WL
        print('number of WL records %d, with %d interpolated points'
              % (np.size(WLpacket['time']), sum(WLpacket['flag'])))
    except (RuntimeError, TypeError):
        WLpacket = None

    # ____________ Get bathy grid from thredds ________________
    gdTB = getDataTestBed(d1, d2)
    # bathy = gdTB.getGridCMS(method='historical')
    bathy = gdTB.getBathyIntegratedTransect(method=1)  # , ForcedSurveyDate=ForcedSurveyDate)
    bathy = prepdata.prep_CMSbathy(bathy, simFnameBackground, backgroundGrid=backgroundDepFname)

    # ___________ Create observation locations ________________
    # these are cell i/j locations
    gaugelocs = []
    for gauge in go.gaugelist:  # get gauge nodes x/y
        pos = go.getWaveGaugeLoc(gauge)
        coord = gp.FRFcoord(pos['Lon'], pos['Lat'], coordType='LL')
        i = np.abs(coord['xFRF'] - bathy['xFRF'][::-1]).argmin()
        j = np.abs(coord['yFRF'] - bathy['yFRF'][::-1]).argmin()
        gaugelocs.append([i, j])

    # begin output
    cmsio = inputOutput.cmsIO()  # initializing the I/O script writer
    stdFname = os.path.join(path_prefix, date_str, date_str + '.std')  # creating file names now
    simFnameOut = os.path.join(path_prefix, date_str, date_str + '.sim')
    specFname = os.path.join(path_prefix, date_str, date_str + '.eng')
    bathyFname = os.path.join(path_prefix, date_str, date_str + '.dep')

    gridOrigin = (bathy['x0'], bathy['y0'])

    cmsio.writeCMS_std(fname=stdFname, gaugeLocs=gaugelocs, fastMode=fastModeOn)
    cmsio.writeCMS_sim(simFnameOut, date_str, gridOrigin)
    cmsio.writeCMS_spec(specFname, wavePacket=wavepacket, wlPacket=WLpacket, windPacket=windpacket)
    cmsio.writeCMS_dep(bathyFname, depPacket=bathy)
    stio = inputOutput.stwaveIO('')
    inputOutput.write_flags(date_str, path_prefix, wavepacket, windpacket, WLpacket, curpacket=None)
    # remove old output files so they're not appended; CMS defaults to appending output files
    try:
        os.remove(os.path.join(path_prefix, date_str, cmsio.waveFname))
        os.remove(os.path.join(path_prefix, date_str, cmsio.selhtFname))
        os.remove(os.path.join(path_prefix + date_str, cmsio.obseFname))
    except OSError:  # there are no files to delete
        pass
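# Usage sketch (hypothetical values, not from the original source): a minimal
# inputDict for CMSsimSetup, mirroring the keys the function reads above. The
# paths are placeholders; only 'version_prefix', 'path_prefix', 'gridSIM', and
# 'gridDEP' are strictly required, the rest fall back to defaults.
def _example_CMSsimSetup():
    inputDict = {'simulationDuration': 24,
                 'pFlag': True,
                 'version_prefix': 'HP',
                 'path_prefix': '/home/spike/cmtb/CMS/',  # hypothetical, note trailing slash
                 'gridSIM': '/home/spike/cmtb/gridsCMS/CMS-Wave-FRF.sim',
                 'gridDEP': '/home/spike/cmtb/gridsCMS/CMS-Wave-FRF.dep'}
    CMSsimSetup('2018-01-01T00:00:00Z', inputDict)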
def plotAltimeterSummary(minTime, maxTime, **kwargs):
    """Makes summary plots of altimeter records and beach change.

    Args:
        minTime: start time in epoch
        maxTime: end time in epoch

    Keyword Arguments:
        frontEnd: switches between the CHL and FRF servers; defaults to the FRF server
    """
    plt.style.use(['seaborn-poster'])
    chlFront = "https://chldata.erdc.dren.mil/thredds/dodsC/frf/"
    frfFront = "http://bones/thredds/dodsC/FRF/"
    frontEnd = kwargs.get('frontEnd', frfFront)
    # 940 line
    url1 = "geomorphology/altimeter/Alt940-340-altimeter/Alt940-340-altimeter.ncml"
    url2 = "geomorphology/altimeter/Alt940-250-altimeter/Alt940-250-altimeter.ncml"
    url3 = "geomorphology/altimeter/Alt940-200-altimeter/Alt940-200-altimeter.ncml"
    # 861 line
    url4 = "geomorphology/altimeter/Alt861-350-altimeter/Alt861-350-altimeter.ncml"
    url5 = "geomorphology/altimeter/Alt861-300-altimeter/Alt861-300-altimeter.ncml"
    url6 = "geomorphology/altimeter/Alt861-250-altimeter/Alt861-250-altimeter.ncml"
    url7 = "geomorphology/altimeter/Alt861-200-altimeter/Alt861-200-altimeter.ncml"
    url8 = "geomorphology/altimeter/Alt861-150-altimeter/Alt861-150-altimeter.ncml"
    # 769 line
    url9 = "geomorphology/altimeter/Alt769-350-altimeter/Alt769-350-altimeter.ncml"
    url10 = "geomorphology/altimeter/Alt769-300-altimeter/Alt769-300-altimeter.ncml"
    url11 = "geomorphology/altimeter/Alt769-250-altimeter/Alt769-250-altimeter.ncml"
    url12 = "geomorphology/altimeter/Alt769-200-altimeter/Alt769-200-altimeter.ncml"
    url13 = "geomorphology/altimeter/Alt769-150-altimeter/Alt769-150-altimeter.ncml"
    # original line
    url14 = "geomorphology/altimeter/Alt03-altimeter/Alt03-altimeter.ncml"
    url15 = "geomorphology/altimeter/Alt04-altimeter/Alt04-altimeter.ncml"
    url16 = "geomorphology/altimeter/Alt05-altimeter/Alt05-altimeter.ncml"

    # get wave record and smush to one record
    go = getDataFRF.getObs(DT.datetime.fromtimestamp(minTime),
                           DT.datetime.fromtimestamp(maxTime))
    w26 = go.getWaveSpec('waverider-26m')
    w17 = go.getWaveSpec('waverider-17m')
    w17New = None  # note: the original left this unbound when w17 was None
    if w17 is not None:
        w17New = sb.reduceDict(w17, np.argwhere(~np.in1d(w17['time'], w26['time'])).squeeze())
    gm = getDataFRF.getDataTestBed(DT.datetime.fromtimestamp(minTime - 365 * 24 * 60 * 60),
                                   DT.datetime.fromtimestamp(maxTime))
    bathy = gm.getBathyIntegratedTransect(forceReturnAll=True, xbounds=[0, 500],
                                          ybounds=[600, 1000])
    # RMSE of elevation change between consecutive surveys
    diffTime, rmses = [], []
    for tt in range(bathy['time'].shape[0] - 1):
        rmses.append(np.sqrt((np.square(bathy['elevation'][tt + 1] - bathy['elevation'][tt])).mean()))
        timeDiffTemp = bathy['time'][tt + 1] - bathy['time'][tt]
        diffTime.append(timeDiffTemp / 2 + bathy['time'][tt])

    # plot z by time (add an artificial offset in elevation for each cross-shore gauge)
    multiplier = 10
    maxTimeDT = nc.num2date(maxTime, 'seconds since 1970-01-01')
    minTimeDT = nc.num2date(minTime, 'seconds since 1970-01-01')
    marker = 2  # marker size for wave plot
    lw = 1
    #######################################################################################
    fig = plt.figure(figsize=(12, 12))
    plt.suptitle('RMSE calculated between\nyBounds = [600, 1000] xBounds = [0, 500]\n'
                 'to avoid pier hole')
    ax0 = plt.subplot2grid((8, 8), (0, 0), colspan=8, rowspan=1)
    ax0.plot(w26['time'], w26['Hs'], 'm.', label='26m', ms=marker)
    if w17New is not None:
        ax0.plot(w17New['time'], w17New['Hs'], 'r.', label='17m', ms=marker)
    ax0.set_ylabel('wave\nheight [m]', fontsize=12)
    plt.legend()
    plt.gca().axes.get_xaxis().set_visible(False)
    ##############################
    ax00 = plt.subplot2grid((8, 8), (1, 0), colspan=8, rowspan=1, sharex=ax0)
    ax00.plot(diffTime, rmses, color='black', marker="_", linestyle='solid', ms=150, linewidth=lw)
    ax00.set_ylabel('RMSE between\nsurveys [m]', fontsize=12)
    for tt, time in enumerate(bathy['time']):
        ax00.plot([time, time], [0, max(rmses)], 'C1', linestyle='dashdot', linewidth=lw)
    plt.gca().axes.get_xaxis().set_visible(False)
    ##############################
    ax1 = plt.subplot2grid((8, 8), (2, 0), colspan=8, rowspan=6, sharex=ax0)
    # one trace per altimeter; the last URL in each group carries the legend label
    # (the original repeated this block per group, plotting the labeled trace twice)
    urlGroups = [([url1, url2, url3, url14, url15, url16], 'b', '940m'),
                 ([url4, url5, url6, url7, url8], 'r', '861m'),
                 ([url9, url10, url11, url12, url13], 'c', '769m')]
    for urls, color, groupLabel in urlGroups:
        for url in urls:
            print(frontEnd + url)
            ncfile = nc.Dataset(frontEnd + url)
            try:
                time0 = nc.num2date(ncfile['time'][:], ncfile['time'].units)
                bottom0 = ncfile['bottomElevation'][:]
            except Exception:  # drop the last (partial) record
                bottom0 = ncfile['bottomElevation'][:-1]
                time0 = nc.num2date(ncfile['time'][:-1], ncfile['time'].units)
            coord = gp.FRFcoord(ncfile['Longitude'][:], ncfile['Latitude'][:])
            label = groupLabel if url is urls[-1] else None
            ax1.plot(time0, np.tile(coord['xFRF'], len(bottom0)) + bottom0 * multiplier,
                     color + '.', label=label, ms=marker)
            ax1.plot([minTimeDT, maxTimeDT], [coord['xFRF'], coord['xFRF']], color,
                     linestyle='dotted', linewidth=lw)
    ##############################
    ax1.set_xlim([minTimeDT, maxTimeDT])
    ax1.legend()
    plt.setp(plt.xticks()[1], rotation=30, ha='right')
    ax1.set_xlabel('time')
    ax1.set_ylabel('frf cross-shore location (with change in elevation plotted)', fontsize=12)
    fname = '/todaysPlots/TodaysAltimeterSummary_{}.png'.format(kwargs.get('duration', ''))
    plt.savefig(fname)
    print('Saved File Here: {}'.format(fname))
    plt.close()
    shutil.copy(fname, '/mnt/gaia/rootdir/CMTB/')
url = "http://134.164.129.55/thredds/dodsC/cmtb/grids/TimeMeanBackgroundDEM/backgroundDEMt0_TimeMean.nc" ncfile = nc.Dataset(url) idxX = (ncfile['xFRF'][:] > xbounds[0]) & (ncfile['xFRF'][:] < xbounds[1]) idxY = (ncfile['yFRF'][:] > ybounds[0]) & (ncfile['yFRF'][:] < ybounds[1]) meanElevation = ncfile['elevation'][idxY, idxX] meanXfrf = ncfile['xFRF'][idxX] meanYfrf = ncfile['yFRF'][idxY] ########### load jbltx data for background fname = "/home/spike/repos/pyObjectiveMapping/Job556221_nc2019_dunex.tif" f = gdal.Open(fname) one = f.GetRasterBand(1).ReadAsArray() one = np.ma.array(one, mask=one == -999999) upperLeftX, xRes, _, upperLeftY, _, yRes = f.GetGeoTransform() lons = np.arange(upperLeftX, xRes * one.shape[1] + upperLeftX, xRes) lats = np.arange(upperLeftY, yRes * one.shape[0] + upperLeftY, yRes) # create unique points xxLons, yyLats = np.meshgrid(lons, lats) xFRF, yFRF = [], [] for coord in zip(xxLons.flatten(), yyLats.flatten()): coordOut = gp.FRFcoord(coord[0], coord[1]) xFRF.append(coordOut['xFRF']) yFRF.append(coordOut['yFRF']) xxFRF, yyFRF = np.meshgrid(xFRF, yFRF) ######## # improj = f.GetProjection() # inproj_B05 = osr.SpatialReference() # inproj_B05.ImportFromWkt(improj) # projcs_B05 = inproj_B05.GetAuthorityCode('PROJCS') # projection_B05 = ccrs.epsg(projcs_B05)