def test_getncmatchingvarlist():
    """
    This test retrieves a pandas DataFrame of variables whose long names in a netCDF file
    match a pattern, which is useful for WAQ variables.
    """
    from dfm_tools.get_nc_helpers import get_ncvardimlist

    file_nc = os.path.join(dir_testinput, r'DFM_3D_z_Grevelingen\computations\run01\DFM_OUTPUT_Grevelingen-FM\Grevelingen-FM_0000_map.nc')

    vars_pd, dims_pd = get_ncvardimlist(file_nc=file_nc)
    pattern = 'Flow .*component'
    vars_pd_matching = vars_pd[vars_pd.loc[:, 'long_name'].str.match(pattern)]  #does not have to stop after pattern
    #vars_pd_matching = vars_pd[vars_pd.loc[:,'long_name'].str.startswith('Flow') & vars_pd.loc[:,'long_name'].str.endswith('component')]
    varkeys_list_matching = list(vars_pd_matching['nc_varkeys'])

    assert varkeys_list_matching == ['mesh2d_ucx', 'mesh2d_ucy', 'mesh2d_ucz', 'mesh2d_ucxa', 'mesh2d_ucya']
                    r'p:\1204257-dcsmzuno\2014\data\meteo\HIRLAM72_2018\h72_201803.nc',
                    r'p:\11202255-sfincs\Testbed\Original_tests\01_Implementation\08_restartfile\sfincs_map.nc', #not available anymore
                    ]

    for file_nc in file_nc_list:
        #get cell center coordinates from regular grid
        if 'ERA5_metOcean_atm' in file_nc:
            data_fromnc_x_1D = get_ncmodeldata(file_nc=file_nc, varname='longitude')
            data_fromnc_y_1D = get_ncmodeldata(file_nc=file_nc, varname='latitude')
            data_fromnc_x, data_fromnc_y = np.meshgrid(data_fromnc_x_1D, data_fromnc_y_1D)
        else:
            data_fromnc_x = get_ncmodeldata(file_nc=file_nc, varname='x')
            data_fromnc_y = get_ncmodeldata(file_nc=file_nc, varname='y')
        vars_pd, dims_pd = get_ncvardimlist(file_nc=file_nc)

        #convert cell centers to cell corners, then to vertices for polygon plotting
        x_cen_withbnd = center2corner(data_fromnc_x)
        y_cen_withbnd = center2corner(data_fromnc_y)
        grid_verts = meshgridxy2verts(x_cen_withbnd, y_cen_withbnd)

        fig, axs = plt.subplots(2, 1, figsize=(10, 9))

        ax = axs[0]
        ax.set_title('xy center data converted to xy corners')
        ax.plot(data_fromnc_x, data_fromnc_y, linewidth=0.5, color='blue')
        ax.plot(data_fromnc_x.T, data_fromnc_y.T, linewidth=0.5, color='blue')
        ax.plot(x_cen_withbnd, y_cen_withbnd, linewidth=0.5, color='crimson')
        ax.plot(x_cen_withbnd.T, y_cen_withbnd.T, linewidth=0.5, color='crimson')
        ax.set_aspect('equal')

        ax = axs[1]
        ax.set_title('xy corner data converted to vertices (useful for map plotting)')
        plot_netmapdata(grid_verts, values=None, ax=ax, linewidth=0.5, color='crimson', facecolor='None')
def regularGrid_to_netcdf(fp_in, nx, ny, treg, lreg):
    dir_output = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'output'))
    if not os.path.exists(dir_output):
        os.makedirs(dir_output)

    file_nc = fp_in
    input_nc = Dataset(file_nc, 'r', format='NETCDF4')
    time_old = input_nc.variables['time'][:]
    if treg != 'all':
        time_old = np.take(time_old, treg)

    vars_pd, dims_pd = get_ncvardimlist(file_nc=file_nc)
    df = vars_pd
    key_values = ['mesh2d_tem1', 'time', 'mesh2d_s1', 'mesh2d_ucx', 'mesh2d_ucy', 'mesh2d_tem1', 'mesh2d_sa1',
                  'mesh2d_water_quality_output_17', 'mesh2d_OXY', 'mesh2d_face_x', 'mesh2d_face_y']
    df = df.loc[df['nc_varkeys'].isin(key_values)]

    """
    ####################################################################################################################
    # Regularise all variables with 3 dimensions (time, nFaces, layers).
    # This corresponds to four dimensions in the regular grid format, since nFaces is split into the x- and y-dimension.
    ####################################################################################################################
    """
    df2 = df.loc[df['ndims'] == 3]
    data_frommap_x = get_ncmodeldata(file_nc=file_nc, varname='mesh2d_face_x')
    data_frommap_y = get_ncmodeldata(file_nc=file_nc, varname='mesh2d_face_y')
    time = get_ncmodeldata(file_nc=file_nc, varname='time', timestep=treg)

    outname = '%s_regular.nc' % os.path.split(file_nc)[1][0:-3]
    file_nc_reg = os.path.join(dir_output, outname)
    root_grp = Dataset(file_nc_reg, 'w', format='NETCDF4')
    root_grp.description = 'Example simulation data'

    first_read = True
    i = 0
    for index, row in df2.iterrows():
        if row['dimensions'][1] == 'mesh2d_nEdges':
            continue
        data_frommap_var = get_ncmodeldata(file_nc=file_nc, varname=row['nc_varkeys'], timestep=treg, layer=lreg)
        data_frommap_var = data_frommap_var.filled(np.nan)
        field_array = np.empty((data_frommap_var.shape[0], ny, nx, data_frommap_var.shape[-1]))
        tms = data_frommap_var.shape[0]
        lrs = data_frommap_var.shape[-1]
        trange = range(0, tms)
        lrange = range(0, lrs)
        #interpolate every (timestep, layer) slice of the unstructured data onto the regular grid
        A = np.array([scatter_to_regulargrid(xcoords=data_frommap_x, ycoords=data_frommap_y, ncellx=nx, ncelly=ny,
                                             values=data_frommap_var[t, :, l].flatten(), method='linear')
                      for t in trange for l in lrange])
        x_grid = A[0][0]
        y_grid = A[0][1]
        A = A[:, 2, :, :]
        A = np.moveaxis(A, [0], [2])
        subs = np.split(A, tms, axis=2)
        field_array[:, :, :, 0:lrs] = [subs[tn] for tn in trange]
        field_array = np.ma.masked_invalid(field_array)
        print('done with variable %s' % row['nc_varkeys'])

        if first_read:
            unout = 'seconds since 2015-01-01 00:00:00'
            lon = x_grid[0, :]
            lat = y_grid[:, 0]
            # create dimensions
            root_grp.createDimension('time', None)
            root_grp.createDimension('lon', lon.shape[0])
            root_grp.createDimension('lat', lat.shape[0])
            root_grp.createDimension('layer', lrs)

            lonvar = root_grp.createVariable('lon', 'float32', 'lon')
            lonvar.setncattr('axis', 'X')
            lonvar.setncattr('reference', 'geographical coordinates, WGS84 projection')
            lonvar.setncattr('units', 'degrees_east')
            lonvar.setncattr('_CoordinateAxisType', 'Lon')
            lonvar.setncattr('long_name', 'longitude')
            lonvar.setncattr('valid_max', '180')
            lonvar.setncattr('valid_min', '-180')
            lonvar[:] = lon

            latvar = root_grp.createVariable('lat', 'float32', 'lat')
            latvar.setncattr('axis', 'Y')
            latvar.setncattr('reference', 'geographical coordinates, WGS84 projection')
            latvar.setncattr('units', 'degrees_north')
            latvar.setncattr('_CoordinateAxisType', 'Lat')
            latvar.setncattr('long_name', 'latitude')
            latvar.setncattr('valid_max', '90')
            latvar.setncattr('valid_min', '-90')
            latvar[:] = lat
            layervar = root_grp.createVariable('layer', 'float32', 'layer')
            layervar.setncattr('axis', 'Z')
            layervar.setncattr('reference', 'geographical coordinates, WGS84 projection')
            layervar.setncattr('units', 'm')
            layervar.setncattr('_CoordinateZisPositive', 'down')
            layervar.setncattr('_CoordinateAxisType', 'Height')
            layervar.setncattr('long_name', 'Depth')
            layervar[:] = range(0, lrs)

            timevar = root_grp.createVariable('time', 'float64', 'time')
            timevar.setncattr('units', unout)
            timevar.setncattr('calendar', 'standard')
            timevar.setncattr('long_name', 'time')
            timevar.setncattr('_CoordinateAxisType', 'Time')
            timevar[:] = time_old

        #write the regularised 4D field, copying the attributes of the source variable
        fieldName = row['nc_varkeys']
        fieldvar = root_grp.createVariable(fieldName, 'float32', ('time', 'lat', 'lon', 'layer'), fill_value=-999)
        key = fieldName
        for ncattr in input_nc.variables[key].ncattrs():
            if ncattr != "_FillValue":
                root_grp.variables[fieldName].setncattr(ncattr, input_nc.variables[key].getncattr(ncattr))
        fieldvar[:] = field_array
        first_read = False
        i += 1

    """
    ####################################################################################################################
    # Regularise all variables with 2 dimensions (time, nFaces).
    # This corresponds to 3 dimensions in the regular grid format, since nFaces is split into the x- and y-dimension.
    ####################################################################################################################
    """
    print('STARTING 2D')
    df2 = df.loc[df['ndims'] == 2]
    excludeList = ['edge', 'face', 'x', 'y']
    for index, row in df2.iterrows():
        test = any(n in str(row['nc_varkeys']) for n in excludeList)
        if not test:
            if row['dimensions'][1] == 'mesh2d_nEdges':
                continue
            ntimes = row['shape'][0]
            data_frommap_var = get_ncmodeldata(file_nc=file_nc, varname=row['nc_varkeys'], timestep=treg)
            data_frommap_var = data_frommap_var.filled(np.nan)
            field_array = np.empty((data_frommap_var.shape[0], ny, nx))
            trange = range(0, data_frommap_var.shape[0])
            tms = data_frommap_var.shape[0]
            A = np.array([scatter_to_regulargrid(xcoords=data_frommap_x, ycoords=data_frommap_y, ncellx=nx, ncelly=ny,
                                                 values=data_frommap_var[t, :].flatten(), method='linear')
                          for t in trange])
            A = A[:, 2, :, :]
            field_array[:, :, :] = A
            field_array = np.ma.masked_invalid(field_array)

            """write data to new netcdf"""
            fieldName = row['nc_varkeys']
            fieldvar = root_grp.createVariable(fieldName, 'float32', ('time', 'lat', 'lon'), fill_value=-999)
            key = fieldName
            for ncattr in input_nc.variables[key].ncattrs():
                if ncattr != "_FillValue":
                    root_grp.variables[fieldName].setncattr(ncattr, input_nc.variables[key].getncattr(ncattr))
            fieldvar[:] = field_array

    root_grp.close()
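# Minimal usage sketch of regularGrid_to_netcdf() above. The grid resolution (nx, ny) and the 'all'
# selections for timesteps/layers are illustrative assumptions, not values prescribed by this module;
# 'all' is simply passed through to get_ncmodeldata, which is assumed to accept it.
if __name__ == '__main__':
    file_nc_map = os.path.join(dir_testinput, r'DFM_3D_z_Grevelingen\computations\run01\DFM_OUTPUT_Grevelingen-FM\Grevelingen-FM_0000_map.nc')
    # regularise the unstructured map data onto a 200x150 regular grid and write it next to the test output
    regularGrid_to_netcdf(fp_in=file_nc_map, nx=200, ny=150, treg='all', lreg='all')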