def test_uvicswan_spc():
    """Spot-check random .spc spectra against the S3 bucket data.

    For a few random stations and random timesteps, load the local .spc file
    and compare its spectra slice to the same slice read back through the
    NetCDFSWAN "spc" interface.  Relies on module globals: inputJson,
    data_location, u, c, tSTART, num_stations_to_test, num_timesteps_to_test.
    """
    global num_stations_to_test, num_timesteps_to_test

    # load from s3 bucket
    swan = NetCDFSWAN(inputJson)
    stations = json.loads(swan.info()["metadata"]["stations"])

    # check stations
    for station_name, values in stations.items():
        if num_stations_to_test <= 0:
            break
        num_stations_to_test -= 1

        s = random.randrange(values["start"], values["end"])
        s_offset = s - values["start"]  # may get snode in the "middle" of the original file

        # create random timesteps to check
        for _ in range(num_timesteps_to_test):
            y = random.randrange(2004, 2017)
            m = random.randrange(1, 13)
            d = random.randrange(1, 29)
            h = random.randrange(0, 24)
            t = int(date2num(datetime(y, m, d, h, 0), units=u, calendar=c)) - tSTART
            # t_offset is 0-744, because the folders are cyclical every month.
            # For the .mat files the time index is specified in the name of the
            # key-string, so getting the data is straightforward.  But to get
            # the timestep (t) in the .spc file we need to find t_offset, which
            # is relative to the start time of the .spc file.
            t_offset = t - int(date2num(datetime(y, m, 1, 0, 0), units=u, calendar=c)) + tSTART
            dateprint = num2date(t + tSTART, units=u, calendar=c)
            sfilepath = data_location + "/" + str(y) + "/" + f"{m:02d}" + "/results/" + station_name + ".spc"
            try:
                spcdata = swan.loadSpc(sfilepath, monthOnly=m)["spectra"]
            except Exception:  # narrowed from bare except: don't swallow KeyboardInterrupt/SystemExit
                print(f"couldnt read {sfilepath}")
                continue
            local_snodes = spcdata[t_offset, s_offset, :, :]  # time, nodes, frequency, direction
            rmote_snodes = swan["spc", "spectra", s, t][0][0]  # otherwise we get [[[ data ]]]
            try:
                np.testing.assert_array_equal(local_snodes, rmote_snodes)
                print(f"snode {s} (offset={s_offset}) - {station_name} at {dateprint}. local file shape={spcdata.shape} t={t} (offset={t_offset}) OK")
            except AssertionError:
                print(f"snode {s} (offset={s_offset}) - {station_name} at {dateprint}. local file shape={spcdata.shape} t={t} (offset={t_offset}) does NOT match bucket data")
def test_NetCDFSWAN_write():
    """Prepare the demo input JSON and upload the "s" and "t" groups."""
    swanFolder = "../s3/swandata"
    jsonFile = './test/json/demo.json'
    # `input_json` instead of `input`: avoid shadowing the builtin input().
    input_json = NetCDFSWAN.prepareInputJSON(jsonFile, swanFolder, year=2000, month=1)
    swan = NetCDFSWAN(input_json)

    # Write
    # swan.uploadStatic(year=2000)
    swan.uploadS()
    swan.uploadT()
def check_data():
    # Verify previously-written data against the in-memory reference arrays.
    # Relies on module globals: variables, stations, spcgroup, tmpFolder.
    #
    # Part 1: .mat files — for every variable group/key, load each month's
    # array and compare it to the matching slice of the reference table.
    for mkey in variables.keys():
        for mmkey in list(variables[mkey].keys()):
            for month in range(1, 13):
                NCDFS = np.array(NetCDFSWAN.load(os.path.join(tmpFolder, f'2000/{str(month)}/results/{mkey}.mat'))[mmkey])
                # Recover this month's start index from the data itself
                start = int(NCDFS[0][0] // 10)  # actual value is similar to the index
                end = start + NCDFS.shape[0]
                v = variables[mkey][mmkey][start:end]  # 0 - 745
                np.testing.assert_array_equal(NCDFS, v)
            print(f"{mkey}.mat ok")
    # Part 2: .spc files — walk each station's months and compare the spectra
    # node-by-node against the reference `spcgroup`, advancing a running
    # time-offset `sts` across months.
    for i, station in enumerate(stations):
        n = stations[station]["nsnodes"]
        sts = 0
        for month in range(1, 13):
            NCDFS = NetCDFSWAN.load(os.path.join(tmpFolder, f'2000/{str(month)}/results/{station}.spc'))["spectra"]
            ts = NCDFS.shape[0]
            spg = spcgroup["spectra"][i, :n]
            for node in range(n):
                spg_n = spg[node][sts:sts + ts]
                NCDFS_n = NCDFS[:, node]
                np.testing.assert_array_equal(spg_n, NCDFS_n)
                #print(f"station {station} node {node}:", NCDFS_n.shape, spg_n.shape, " start index:", sts)
            # NOTE(review): advances by ts-1, i.e. consecutive monthly .spc
            # files appear to overlap by one timestep — confirm against writer.
            sts += ts - 1
        print(f"{station}.spc ok")
def test_uvicswan_mat():
    """Spot-check random .mat timesteps against the S3 bucket data.

    Relies on module globals: inputJson, data_location, u, c, tSTART,
    num_timesteps_to_test.
    """
    global num_timesteps_to_test

    # load from s3 bucket
    swan = NetCDFSWAN(inputJson)

    # trial list. Only chooses a few of them
    mats = {
        "u10": ("WIND", "Windv_x"),
        "v10": ("WIND", "Windv_y"),
        "tps": ("TPS", "TPsmoo"),
        "tm01": ("TM01", "Tm01"),
        "dir": ("DIR", "Dir"),
    }

    # Tuple-unpack directly instead of indexing mat[0]/mat[1][0]/mat[1][1]
    for var, (mVAR, mvar) in mats.items():
        # create random timesteps to check
        for _ in range(num_timesteps_to_test):
            y = random.randrange(2004, 2017)
            m = random.randrange(1, 13)
            d = random.randrange(1, 29)
            h = random.randrange(0, 24)
            t = int(date2num(datetime(y, m, d, h, 0), units=u, calendar=c)) - tSTART
            dateprint = num2date(t + tSTART, units=u, calendar=c)
            mfilepath = data_location + "/" + str(y) + "/" + f"{m:02d}" + "/results/" + mVAR + ".mat"
            try:
                matdata = loadmat(mfilepath)
            except Exception:  # narrowed from bare except: don't swallow KeyboardInterrupt/SystemExit
                print(f"couldnt read {mfilepath}")
                continue
            # .mat key embeds the timestamp, e.g. "Windv_x_20040101_000000"
            key = mvar + "_" + str(y) + f"{m:02d}" + f"{d:02d}" + "_" + f"{h:02d}" + "0000"
            local_nodes = matdata[key][0]
            rmote_nodes = swan["s", var, t][0]
            np.testing.assert_array_equal(local_nodes, rmote_nodes)
            print(f"{key} {dateprint} OK")
def test_getFiles():
    """Print the file listing NetCDFSWAN discovers under tmpFolder."""
    files = NetCDFSWAN.getFiles(tmpFolder)
    print(files)
def test_preparingFunctions():
    """Prepare the demo input JSON and dump it to tmpFolder for inspection."""
    prepared = NetCDFSWAN.prepareInputJSON('./test/json/demo.json', tmpFolder, year=2000)
    out_path = os.path.join(tmpFolder, "demo.prepared.json")
    with open(out_path, "w+") as fh:
        json.dump(prepared, fh, indent=2)
def test_getSpectralStationMetadata():
    """Extract spectral-station metadata and dump it to tmpFolder for inspection."""
    metadata = NetCDFSWAN.getSpectralStationMetadata(tmpFolder, year=2000)
    out_path = os.path.join(tmpFolder, "demo.spectral.json")
    with open(out_path, "w+") as fh:
        json.dump(metadata, fh, indent=2)
def test_printInfo():
    """Dump the .mat keys and .spc shapes found in the year-2000 test data."""
    NetCDFSWAN.printMatKeys(tmpFolder, year=2000)
    NetCDFSWAN.printSpcShape(tmpFolder, year=2000)
def test_load():
    """Smoke-test NetCDFSWAN.load on .mat, .bot and .ele sample files."""
    hs = NetCDFSWAN.load(os.path.join(tmpFolder, "2000/1/results/HS.mat"))
    print(hs['Hsig'].shape)
    wind = NetCDFSWAN.load(os.path.join(tmpFolder, "2000/1/results/WIND.mat"))
    print(wind)
    bot = NetCDFSWAN.load(os.path.join(tmpFolder, "Mesh/dummy.bot"))
    print(bot.shape)
    ele = NetCDFSWAN.load(os.path.join(tmpFolder, "Mesh/dummy.ele"))
    print(ele.shape)
import os
import json

import numpy as np

from netcdfswan import NetCDFSWAN

if __name__ == "__main__":
    swanFolder = '../data'
    jsonFile = 'BCSWANv5/BCSWANv5.json'
    # `input_json` instead of `input`: avoid shadowing the builtin input().
    input_json = NetCDFSWAN.prepareInputJSON(jsonFile, swanFolder, year=2004, month=1)
    NetCDFSWAN(input_json)
    # input={
    #   "name":"SWANv5",
    #   "swanFolder":'../s3',
    #   "bucket":"uvic-bcwave",
    #   "showProgress":True,
    #   "memorySize":40,
    #   "cacheSize":100,
    #   "cacheLocation":"../s3",
    #   "localOnly":True
    # }
    # with NetCDFSWAN(input) as swan:
    #   ntime = swan.obj['dimensions'].get('ntime')
    #   startDate = swan.obj['metadata'].get('startDate')
    #   timeStep = swan.obj['metadata'].get('timeStep(h)')
    #   startDate=np.datetime64(startDate)
    #   datetime = startDate+np.arange(ntime)*np.timedelta64(timeStep, 'h')
    #   swan['time','time']=datetime
def test_NetCDFSWAN_logger():
    """Run the full upload pipeline with progress logged to ./data/progress.log."""
    logging.basicConfig(
        filename=os.path.join('./data', "progress.log"),
        level=logging.DEBUG,
        format="%(levelname)s %(asctime)s %(message)s"
    )
    logger = logging.getLogger()

    try:
        swanFolder = "../s3/swandata"
        jsonFile = './test/json/demo.json'
        # `input_json` instead of `input`: avoid shadowing the builtin input().
        input_json = NetCDFSWAN.prepareInputJSON(jsonFile, swanFolder, year=2000, month=1)
        swan = NetCDFSWAN(input_json, logger=logger)
        swan.uploadStatic()
        swan.uploadS()
        swan.uploadT()
        swan.uploadSpc()
    except Exception as err:
        # logger.exception records the full traceback, not just the message
        logger.exception(err)
def test_NetCDFSWAN():
    """Read every group back from the local cache and compare to the reference arrays.

    Relies on module globals: bed, elem, time, lat, lon, freq, dir,
    variables, spcgroup.
    """
    # `input_json` instead of `input`: avoid shadowing the builtin input().
    input_json = {
        "name": "swan-test1",
        "bucket": "uvic-bcwave",
        "cacheLocation": "../s3",
        "localOnly": True
    }
    swan = NetCDFSWAN(input_json)

    # Read - static groups
    np.testing.assert_array_equal(swan["nodes", "bed"], bed)
    np.testing.assert_array_equal(swan["elem", "elem"], elem)
    np.testing.assert_array_equal(swan["time", "time"], time)
    np.testing.assert_array_equal(swan["nodes", "lat"], lat)
    np.testing.assert_array_equal(swan["nodes", "lon"], lon)
    np.testing.assert_array_equal(swan["freq", "freq"], freq)
    np.testing.assert_array_equal(swan["dir", "dir"], dir)
    np.testing.assert_array_equal(swan["snodes", "slon", 0:11], [0, 1, 2, 3, 4, 5, 6, 6, 7, 7, 8])
    np.testing.assert_array_equal(swan["snodes", "slat", 0:11], [0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0])
    np.testing.assert_array_equal(swan["snodes", "stationid", 0:11], [0, 1, 2, 3, 4, 5, 6, 6, 7, 7, 8])
    np.testing.assert_array_equal(swan["stations", "name", 0:2], ["beverly", "brooks"])

    # Output variable -> (.mat group, .mat key); shared by the "s" and "t"
    # checks below.  Order matches the original assert sequence.
    var_map = [
        ("u10", "WIND", "Windv_x"),
        ("v10", "WIND", "Windv_y"),
        ("hs", "HS", "Hsig"),
        ("tps", "TPS", "TPsmoo"),
        ("tmm10", "TMM10", "Tm_10"),
        ("tm01", "TM01", "Tm01"),
        ("tm02", "TM02", "Tm02"),
        ("pdir", "PDIR", "Pdir"),
        ("dir", "DIR", "Dir"),
        ("dspr", "DSPR", "Dspr"),
        ("qp", "QP", "Qp"),
        ("transpx", "TRANSP", "Transp_x"),
        ("transpy", "TRANSP", "Transp_y"),
    ]
    # "s" group holds the arrays as written
    for key, grp, mat in var_map:
        np.testing.assert_array_equal(swan["s", key], variables[grp][mat])
    # "t" group holds the transposed layout
    for key, grp, mat in var_map:
        np.testing.assert_array_equal(swan["t", key], variables[grp][mat].T)

    # Spectra per station (removed unused `id` lookup, which also shadowed a builtin)
    for name in swan.stations:
        sIndex = swan.stations[name]['start']
        eIndex = swan.stations[name]['end']
        np.testing.assert_array_equal(swan["spc", "spectra", sIndex:eIndex], spcgroup["spectra"][sIndex:eIndex])
input={ "name":"SWANv6", "swanFolder":'../data', "bucket":"uvic-bcwave", "showProgress":True, "memorySize":40, "cacheSize":100, "cacheLocation":"../data", "localOnly":False } with NetCDFSWAN(input) as swan: swan.uploadPt("hspt") # ntime = swan.obj['dimensions'].get('ntime') # startDate = swan.obj['metadata'].get('startDate') # timeStep = swan.obj['metadata'].get('timeStep(h)') # startDate=np.datetime64(startDate) # datetime = startDate+np.arange(ntime)*np.timedelta64(timeStep, 'h') # swan['time','time']=datetime # print(datetime) # print(swan['time','time']) # dirbin=np.array([265,255,245,235,225,215,205,195,185,175,165,155,145,135,125,115,105, 95, 85, 75, 65, 55, 45, 35, 25, 15, 5, -5,-15,-25,-35,-45,-55,-65,-75,-85]) # swan['dirbin','dirbin']=dirbin # swan.uploadStatic(year=2004)