def save_netcdf(times, us, vs, depths, station, lons, lats, to, tf):
    """Save the u/v time series over the volume into a netCDF file."""
    path = '/ocean/nsoontie/MEOPAR/TidalEllipseData/ModelTimeSeries/'
    fname = '{}_currents_{}_{}.nc'.format(
        station, to.strftime('%Y%m%d'), tf.strftime('%Y%m%d'))
    print(fname)
    nc_file = nc.Dataset(os.path.join(path, fname), 'w')
    # dataset attributes
    nc_tools.init_dataset_attrs(
        nc_file,
        title='{} currents'.format(station),
        notebook_name='N/A',
        nc_filepath=os.path.join(path, fname),
        comment='Generated for tidal analysis')
    # dimensions
    nc_file.createDimension('time_counter', None)
    nc_file.createDimension('deptht', us.shape[1])
    nc_file.createDimension('y', us.shape[2])
    nc_file.createDimension('x', us.shape[3])
    # variables
    # time_counter
    time_counter = nc_file.createVariable(
        'time_counter', 'float64', ('time_counter'))
    time_counter.long_name = 'Time axis'
    time_counter.axis = 'T'
    time_counter.units = 'hour since {}'.format(NodalCorr['reftime'])
    # lat, lon
    lon = nc_file.createVariable('nav_lon', 'float32', ('y', 'x'), zlib=True)
    lon[:] = lons[:]
    lat = nc_file.createVariable('nav_lat', 'float32', ('y', 'x'), zlib=True)
    lat[:] = lats[:]
    # u, v
    u = nc_file.createVariable(
        'vozocrtx', 'float32', ('time_counter', 'deptht', 'y', 'x'),
        zlib=True)
    u.units = 'm/s'
    u.long_name = 'Zonal Velocity'
    u.coordinates = 'time_counter, deptht'
    v = nc_file.createVariable(
        'vomecrty', 'float32', ('time_counter', 'deptht', 'y', 'x'),
        zlib=True)
    v.units = 'm/s'
    v.long_name = 'Meridional Velocity'
    v.coordinates = 'time_counter, deptht'
    # depth
    depth = nc_file.createVariable('deptht', 'float32', ('deptht'), zlib=True)
    depth.units = 'm'
    depth.long_name = 'Depth'
    depth.coordinates = 'deptht'
    u[:] = us
    v[:] = vs
    depth[:] = depths
    time_counter[:] = times
    nc_file.close()

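# Hedged usage sketch for save_netcdf (not part of the analysis code above):
# the station name, date range, and array shapes are illustrative assumptions.
# It also assumes the module-level context the function relies on (netCDF4 as
# nc, os, salishsea_tools.nc_tools, and the NodalCorr dict) is already set up.
# us/vs are (time, depth, y, x) arrays and times is hours since
# NodalCorr['reftime'].
from datetime import datetime

import numpy as np

to, tf = datetime(2015, 1, 1), datetime(2015, 1, 15)
nt, nz, ny, nx = 336, 40, 5, 5
times = np.arange(nt, dtype=float)
us = np.zeros((nt, nz, ny, nx), dtype='float32')
vs = np.zeros((nt, nz, ny, nx), dtype='float32')
depths = np.linspace(0.5, 400.0, nz)
lons = np.zeros((ny, nx), dtype='float32')
lats = np.zeros((ny, nx), dtype='float32')
save_netcdf(times, us, vs, depths, 'ExampleStation', lons, lats, to, tf)
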
def save_netcdf_depthprofile(tide, depths, const, lons, lats, to, tf):
    """Save the tidal ellipse depth profiles into a netCDF file."""
    fname = '{}_CODAR_baroclinic_tides_{}_{}.nc'.format(
        const, to.strftime('%Y%m%d'), tf.strftime('%Y%m%d'))
    nc_file = nc.Dataset(fname, 'w', zlib=True)
    # dataset attributes
    nc_tools.init_dataset_attrs(
        nc_file,
        title='{} baroclinic tides'.format(const),
        notebook_name='N/A',
        nc_filepath='/data/nsoontie/MEOPAR/analysis/Nancy/tides/' + fname,
        comment='Baroclinic tidal analysis')
    # dimensions
    temp = tide['Phase']
    nc_file.createDimension('deptht', temp.shape[0])
    nc_file.createDimension('y', temp.shape[1])
    nc_file.createDimension('x', temp.shape[2])
    # variables
    # lat, lon
    lon = nc_file.createVariable('nav_lon', 'float32', ('y', 'x'), zlib=True)
    lon[:] = lons[:]
    lat = nc_file.createVariable('nav_lat', 'float32', ('y', 'x'), zlib=True)
    lat[:] = lats[:]
    # semi-major axis
    sema = nc_file.createVariable(
        'Semi-Major', 'float32', ('deptht', 'y', 'x'), zlib=True)
    sema.units = 'm/s'
    sema.long_name = 'Semi-Major Axis (m/s)'
    # semi-minor axis
    semi = nc_file.createVariable(
        'Semi-Minor', 'float32', ('deptht', 'y', 'x'), zlib=True)
    semi.units = 'm/s'
    semi.long_name = 'Semi-Minor Axis (m/s)'
    # phase
    pha = nc_file.createVariable(
        'Phase', 'float32', ('deptht', 'y', 'x'), zlib=True)
    pha.units = 'deg GMT'
    pha.long_name = 'Phase'
    # inclination
    inc = nc_file.createVariable(
        'Inclination', 'float32', ('deptht', 'y', 'x'), zlib=True)
    inc.units = 'deg CCW E'
    inc.long_name = 'Inclination'
    # depth
    depth = nc_file.createVariable('deptht', 'float32', ('deptht'), zlib=True)
    depth.units = 'm'
    depth.long_name = 'Depth'
    depth.coordinates = 'deptht'
    sema[:] = tide['Semi-Major Axis'][:]
    semi[:] = tide['Semi-Minor Axis'][:]
    depth[:] = depths[:]
    pha[:] = tide['Phase'][:]
    inc[:] = tide['Inclination'][:]
    nc_file.close()

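# Hedged sketch of the inputs save_netcdf_depthprofile expects, inferred from
# the dict keys used above; the constituent name, dates, and shapes are
# illustrative assumptions, and the module-level nc / nc_tools context is
# assumed.  Each tide entry is a (depth, y, x) array of ellipse parameters.
from datetime import datetime

import numpy as np

nz, ny, nx = 40, 5, 5
tide = {
    'Semi-Major Axis': np.zeros((nz, ny, nx)),
    'Semi-Minor Axis': np.zeros((nz, ny, nx)),
    'Phase': np.zeros((nz, ny, nx)),
    'Inclination': np.zeros((nz, ny, nx)),
}
depths = np.linspace(0.5, 400.0, nz)
lons = np.zeros((ny, nx))
lats = np.zeros((ny, nx))
to, tf = datetime(2015, 1, 1), datetime(2015, 1, 15)
save_netcdf_depthprofile(tide, depths, 'M2', lons, lats, to, tf)
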
def test_init_dataset_attrs_quiet(mock_nhu, mock_nfhu, capsys, nc_dataset):
    """init_dataset_attrs prints no output when quiet=True
    """
    nc_tools.init_dataset_attrs(
        nc_dataset, 'Test Dataset', 'TestDatasetNotebook', 'test_dataset.nc',
        quiet=True)
    out, err = capsys.readouterr()
    assert out == ''

def test_init_dataset_attrs_no_overwrite(
    mock_nhu, mock_nfhu, capsys, nc_dataset,
):
    """init_dataset_attrs does not overwrite existing attrs
    """
    nc_dataset.Conventions = 'CF-1.6'
    nc_tools.init_dataset_attrs(
        nc_dataset, 'Test Dataset', 'TestDatasetNotebook', 'test_dataset.nc')
    out, err = capsys.readouterr()
    assert out.splitlines()[0] == (
        'Existing attribute value found, not overwriting: Conventions')

def test_init_dataset_attrs_no_overwrite_quiet(
    mock_nhu, mock_nfhu, capsys, nc_dataset,
):
    """init_dataset_attrs suppresses no-overwrite notice when quiet=True
    """
    nc_dataset.Conventions = 'CF-1.6'
    nc_dataset.history = 'foo'
    nc_tools.init_dataset_attrs(
        nc_dataset, 'Test Dataset', 'TestDatasetNotebook', 'test_dataset.nc',
        quiet=True)
    out, err = capsys.readouterr()
    assert out == ''
    assert nc_dataset.history == 'foo'

def _save_netcdf(day, tc, surges, forecast_flag, textfile, config, lats, lons):
    """Save the surge for a given day in a netCDF4 file."""
    # Western open boundary (JdF) grid parameter values for NEMO
    startj, endj, r = 370, 470, 1
    lengthj = endj - startj

    # netCDF4 file setup
    save_path = config["ssh"]["ssh dir"]
    filename = config["ssh"]["file template"].format(day)
    if forecast_flag:
        filepath = os.path.join(save_path, "fcst", filename)
        comment = "Prediction from Neah Bay storm surge website"
    else:
        filepath = os.path.join(save_path, "obs", filename)
        try:
            # Unlink file path in case it exists as a symlink to a fcst/
            # file created by the upload_forcing worker because there was
            # no obs/ file
            os.unlink(filepath)
        except OSError:
            # File path does not exist
            pass
        comment = "Observation from Neah Bay storm surge website"
    comment = " ".join((comment, f"generated by SalishSeaCast {NAME} worker"))
    ssh_file = netCDF4.Dataset(filepath, "w")
    nc_tools.init_dataset_attrs(
        ssh_file,
        title="Neah Bay SSH hourly values",
        notebook_name="N/A",
        nc_filepath=filepath,
        comment=comment,
        quiet=True,
    )
    ssh_file.source = os.fspath(textfile)
    ssh_file.references = f"https://github.com/SalishSeaCast/SalishSeaNowcast/blob/main/nowcast/workers/{NAME}.py"
    logger.debug(f"created western open boundary file {filepath}")

    # Create netCDF dimensions
    ssh_file.createDimension("time_counter", None)
    ssh_file.createDimension("yb", 1)
    ssh_file.createDimension("xbT", lengthj * r)

    # Create netCDF variables
    time_counter = ssh_file.createVariable("time_counter", "float32", "time_counter")
    time_counter.long_name = "Time axis"
    time_counter.axis = "T"
    time_counter.units = f"hour since 00:00:00 on {day:%Y-%m-%d}"
    # Latitudes and longitudes
    nav_lat = ssh_file.createVariable("nav_lat", "float32", ("yb", "xbT"))
    nav_lat.long_name = "Latitude"
    nav_lat.units = "degrees_north"
    nav_lon = ssh_file.createVariable("nav_lon", "float32", ("yb", "xbT"))
    nav_lon.long_name = "Longitude"
    nav_lon.units = "degrees_east"
    # Sea surface height
    sossheig = ssh_file.createVariable(
        "sossheig", "float32", ("time_counter", "yb", "xbT"), zlib=True
    )
    sossheig.units = "m"
    sossheig.long_name = "Sea surface height"
    sossheig.grid = "SalishSea2"
    # Barotropic u and v velocity components
    vobtcrtx = ssh_file.createVariable(
        "vobtcrtx", "float32", ("time_counter", "yb", "xbT"), zlib=True
    )
    vobtcrtx.units = "m/s"
    vobtcrtx.long_name = "Barotropic U Velocity"
    vobtcrtx.grid = "SalishSea2"
    vobtcrty = ssh_file.createVariable(
        "vobtcrty", "float32", ("time_counter", "yb", "xbT"), zlib=True
    )
    vobtcrty.units = "m/s"
    vobtcrty.long_name = "Barotropic V Velocity"
    vobtcrty.grid = "SalishSea2"
    # Boundary description for NEMO
    nbidta = ssh_file.createVariable("nbidta", "int32", ("yb", "xbT"), zlib=True)
    nbidta.long_name = "i grid position"
    nbidta.units = 1
    nbjdta = ssh_file.createVariable("nbjdta", "int32", ("yb", "xbT"), zlib=True)
    nbjdta.long_name = "j grid position"
    nbjdta.units = 1
    nbrdta = ssh_file.createVariable("nbrdta", "int32", ("yb", "xbT"), zlib=True)
    nbrdta.long_name = "position from boundary"
    nbrdta.units = 1

    # Load values
    for ir in range(r):
        nav_lat[0, ir * lengthj:(ir + 1) * lengthj] = lats[startj:endj, ir]
        nav_lon[0, ir * lengthj:(ir + 1) * lengthj] = lons[startj:endj, ir]
        nbidta[0, ir * lengthj:(ir + 1) * lengthj] = ir
        nbjdta[0, ir * lengthj:(ir + 1) * lengthj] = range(startj, endj)
        nbrdta[0, ir * lengthj:(ir + 1) * lengthj] = ir
    for ib in range(lengthj * r):
        sossheig[:, 0, ib] = surges
        time_counter[:] = tc + 1
        vobtcrtx[:, 0, ib] = numpy.zeros(len(surges))
        vobtcrty[:, 0, ib] = numpy.zeros(len(surges))
    ssh_file.close()
    try:
        lib.fix_perms(filepath)
    except PermissionError:
        # Can't change permissions/group because we don't own the file
        # but that's okay because we were able to write it above
        pass
    logger.debug(f"saved western open boundary file {filepath}")
    return filepath

def create_northern_tides_contd(Z1, Z2, tidevar, constituent, code,
                                name='SalishSea2'):
    import netCDF4 as NC
    import numpy as np
    from salishsea_tools import nc_tools

    nemo = NC.Dataset(
        name + '_' + code + '_North_tide_' + constituent + '_grid_'
        + tidevar + '.nc', 'w', zlib=True)
    # start and end points
    starti = 32
    endi = 62
    lengthi = endi - starti
    # dataset attributes
    nc_tools.init_dataset_attrs(
        nemo,
        title='Tidal Boundary Conditions for Northern Boundary',
        notebook_name='johnstone_tides_contd',
        nc_filepath='../../../NEMO-forcing/open_boundaries/north/tides/SalishSea2_North_tide_' + constituent + '_grid_' + tidevar + '.nc',
        comment='Tidal current and amplitude data scaled based on differences between K1/M2 and North observations and webtide.')
    # dimensions (only need x and y, don't need depth or time_counter)
    nemo.createDimension('xb', lengthi)
    nemo.createDimension('yb', 1)
    # variables
    # nbidta, nbjdta, nbrdta
    nbidta = nemo.createVariable('nbidta', 'int32', ('yb', 'xb'))
    nbidta.long_name = 'i grid position'
    nbidta.units = 1
    nbjdta = nemo.createVariable('nbjdta', 'int32', ('yb', 'xb'))
    nbjdta.long_name = 'j grid position'
    nbjdta.units = 1
    nbrdta = nemo.createVariable('nbrdta', 'int32', ('yb', 'xb'))
    nbrdta.long_name = 'position from boundary'
    nbrdta.units = 1
    print(nbidta.shape)
    # add in the counter around the boundary
    # (taken from Susan's code in Prepare Tide Files)
    xb = nemo.createVariable('xb', 'int32', ('xb',), zlib=True)
    xb.units = 'non dim'
    xb.long_name = 'counter around boundary'
    yb = nemo.createVariable('yb', 'int32', ('yb',), zlib=True)
    yb.units = 'non dim'
    yb.long_name = 'counter along boundary'
    yb[0] = 897
    xb[:] = np.arange(starti, endi)
    # values
    # nbidta, nbjdta
    nbidta[:] = np.arange(starti, endi)
    nbjdta[:] = 897
    nbrdta[:] = 1
    if tidevar == 'T':
        z1 = nemo.createVariable('z1', 'float32', ('yb', 'xb'), zlib=True)
        z1.units = 'm'
        z1.long_name = 'tidal elevation: cosine'
        z2 = nemo.createVariable('z2', 'float32', ('yb', 'xb'), zlib=True)
        z2.units = 'm'
        z2.long_name = 'tidal elevation: sine'
        z1[0, :] = np.array([Z1] * lengthi)
        z2[0, :] = np.array([Z2] * lengthi)
    if tidevar == 'U':
        u1 = nemo.createVariable('u1', 'float32', ('yb', 'xb'), zlib=True)
        u1.units = 'm'
        u1.long_name = 'tidal x-velocity: cosine'
        u2 = nemo.createVariable('u2', 'float32', ('yb', 'xb'), zlib=True)
        u2.units = 'm'
        u2.long_name = 'tidal x-velocity: sine'
        u1[0, 0:lengthi] = Z1[:, 0]
        u2[0, 0:lengthi] = Z2[:, 0]
    if tidevar == 'V':
        v1 = nemo.createVariable('v1', 'float32', ('yb', 'xb'), zlib=True)
        v1.units = 'm'
        v1.long_name = 'tidal y-velocity: cosine'
        v2 = nemo.createVariable('v2', 'float32', ('yb', 'xb'), zlib=True)
        v2.units = 'm'
        v2.long_name = 'tidal y-velocity: sine'
        v1[0, 0:lengthi] = Z1[:, 0]
        v2[0, 0:lengthi] = Z2[:, 0]
    nc_tools.check_dataset_attrs(nemo)
    nemo.close()

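# Hedged call sketch for create_northern_tides_contd (not from the notebook):
# the constituent/code strings and amplitude values below are placeholder
# assumptions.  For the elevation file (tidevar 'T') Z1/Z2 are scalars; for
# 'U'/'V' they are (endi - starti, 1) arrays of cosine/sine current amplitudes.
import numpy as np

# elevation boundary file for K1
create_northern_tides_contd(0.75, 0.32, 'T', 'K1', 'scaled')

# x-velocity boundary file for K1 (30 = endi - starti boundary points)
Z1_u = np.zeros((30, 1))
Z2_u = np.zeros((30, 1))
create_northern_tides_contd(Z1_u, Z2_u, 'U', 'K1', 'scaled')
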
lons.valid_range = (np.min(lons[:]), np.max(lons[:]))

vars = ((src01, wgt01), (src02, wgt02), (src03, wgt03), (src04, wgt04))
for i, sw in enumerate(vars):
    s, w = sw
    sname = 'src{:02d}'.format(i + 1)
    wname = 'wgt{:02d}'.format(i + 1)
    s[:] = src.variables[sname][:]
    s.units = 1
    s.long_name = '{} Grid Index {} (Flattened)'.format(
        atmos_grid_name, i + 1)
    s.valid_range = np.array(
        (np.min(src.variables[sname]), np.max(src.variables[sname])))
    w[:] = src.variables[wname][:]
    w.units = 1
    w.long_name = 'Salish Sea Grid Weights for {}'.format(sname)
    w.valid_range = np.array(
        (np.min(src.variables[wname]), np.max(src.variables[wname])))

nc_tools.init_dataset_attrs(
    weights,
    'West Coast of Vancouver Island NEMO {} Atmospheric Forcing Interpolation Weights'.format(atmos_grid_name),
    [],
    netcdf4_weight,
)
weights.history = history
weights.source = "https://bitbucket.org/salishsea/analysis-michael/src/tip/weights/improveweights.py"

def _save_netcdf(
    day, tc, surges, forecast_flag, textfile, config, lats, lons,
):
    """Save the surge for a given day in a netCDF4 file.
    """
    # Western open boundary (JdF) grid parameter values for NEMO
    startj, endj, r = 384, 471, 1
    lengthj = endj - startj

    # netCDF4 file setup
    save_path = config['ssh']['ssh dir']
    filename = config['ssh']['file template'].format(day)
    if forecast_flag:
        filepath = os.path.join(save_path, 'fcst', filename)
        comment = 'Prediction from Neah Bay storm surge website'
    else:
        filepath = os.path.join(save_path, 'obs', filename)
        try:
            # Unlink file path in case it exists as a symlink to a fcst/
            # file created by the upload_forcing worker because there was
            # no obs/ file
            os.unlink(filepath)
        except OSError:
            # File path does not exist
            pass
        comment = 'Observation from Neah Bay storm surge website'
    comment = ' '.join(
        (comment, f'generated by Salish Sea NEMO nowcast {NAME} worker'))
    ssh_file = nc.Dataset(filepath, 'w')
    nc_tools.init_dataset_attrs(
        ssh_file,
        title='Neah Bay SSH hourly values',
        notebook_name='N/A',
        nc_filepath=filepath,
        comment=comment,
        quiet=True,
    )
    ssh_file.source = textfile
    ssh_file.references = (
        f'https://bitbucket.org/salishsea/tools/src/tip/SalishSeaNowcast/'
        f'nowcast/workers/{NAME}.py')
    logger.debug(f'created western open boundary file {filepath}')

    # Create netCDF dimensions
    ssh_file.createDimension('time_counter', None)
    ssh_file.createDimension('yb', 1)
    ssh_file.createDimension('xbT', lengthj * r)

    # Create netCDF variables
    time_counter = ssh_file.createVariable(
        'time_counter', 'float32', ('time_counter'))
    time_counter.long_name = 'Time axis'
    time_counter.axis = 'T'
    time_counter.units = f'hour since 00:00:00 on {day:%Y-%m-%d}'
    # Latitudes and longitudes
    nav_lat = ssh_file.createVariable('nav_lat', 'float32', ('yb', 'xbT'))
    nav_lat.long_name = 'Latitude'
    nav_lat.units = 'degrees_north'
    nav_lon = ssh_file.createVariable('nav_lon', 'float32', ('yb', 'xbT'))
    nav_lon.long_name = 'Longitude'
    nav_lon.units = 'degrees_east'
    # Sea surface height
    sossheig = ssh_file.createVariable(
        'sossheig', 'float32', ('time_counter', 'yb', 'xbT'), zlib=True)
    sossheig.units = 'm'
    sossheig.long_name = 'Sea surface height'
    sossheig.grid = 'SalishSea2'
    # Barotropic u and v velocity components
    vobtcrtx = ssh_file.createVariable(
        'vobtcrtx', 'float32', ('time_counter', 'yb', 'xbT'), zlib=True)
    vobtcrtx.units = 'm/s'
    vobtcrtx.long_name = 'Barotropic U Velocity'
    vobtcrtx.grid = 'SalishSea2'
    vobtcrty = ssh_file.createVariable(
        'vobtcrty', 'float32', ('time_counter', 'yb', 'xbT'), zlib=True)
    vobtcrty.units = 'm/s'
    vobtcrty.long_name = 'Barotropic V Velocity'
    vobtcrty.grid = 'SalishSea2'
    # Boundary description for NEMO
    nbidta = ssh_file.createVariable(
        'nbidta', 'int32', ('yb', 'xbT'), zlib=True)
    nbidta.long_name = 'i grid position'
    nbidta.units = 1
    nbjdta = ssh_file.createVariable(
        'nbjdta', 'int32', ('yb', 'xbT'), zlib=True)
    nbjdta.long_name = 'j grid position'
    nbjdta.units = 1
    nbrdta = ssh_file.createVariable(
        'nbrdta', 'int32', ('yb', 'xbT'), zlib=True)
    nbrdta.long_name = 'position from boundary'
    nbrdta.units = 1

    # Load values
    for ir in range(r):
        nav_lat[0, ir * lengthj:(ir + 1) * lengthj] = lats[startj:endj, ir]
        nav_lon[0, ir * lengthj:(ir + 1) * lengthj] = lons[startj:endj, ir]
        nbidta[0, ir * lengthj:(ir + 1) * lengthj] = ir
        nbjdta[0, ir * lengthj:(ir + 1) * lengthj] = range(startj, endj)
        nbrdta[0, ir * lengthj:(ir + 1) * lengthj] = ir
    for ib in range(lengthj * r):
        sossheig[:, 0, ib] = surges
        time_counter[:] = tc + 1
        vobtcrtx[:, 0, ib] = np.zeros(len(surges))
        vobtcrty[:, 0, ib] = np.zeros(len(surges))
    ssh_file.close()
    try:
        os.chmod(filepath, FilePerms(user='******', group='rw', other='r'))
    except PermissionError:
        # Can't change permissions/group because we don't own the file
        # but that's okay because we were able to write it above
        pass
    logger.debug(f'saved western open boundary file {filepath}')
    return filepath

def test_init_dataset_attrs(mock_nhu, mock_nfhu, nc_dataset):
    """init_dataset_attrs initializes dataset global attrs
    """
    nc_tools.init_dataset_attrs(
        nc_dataset, 'Test Dataset', 'TestDatasetNotebook', 'test_dataset.nc')
    assert nc_dataset.Conventions == 'CF-1.6'

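# Minimal sketch of the test scaffolding the init_dataset_attrs tests above
# assume; the real fixture and patch decorators are not included in this
# collection, so the patch targets and fixture body here are assumptions.
# nc_dataset is a throwaway in-memory dataset, and mock_nhu / mock_nfhu are
# the mocks injected by patching the notebook/nc-file URL helpers so the
# tests do not touch version control.
import netCDF4 as nc
import pytest


@pytest.fixture
def nc_dataset():
    dataset = nc.Dataset('test_dataset.nc', 'w', diskless=True)
    yield dataset
    dataset.close()


# e.g. each test is presumably decorated along these lines
# (helper names are assumptions):
# @unittest.mock.patch('salishsea_tools.nc_tools._nc_file_hg_url')
# @unittest.mock.patch('salishsea_tools.nc_tools._notebook_hg_url')
# def test_init_dataset_attrs(mock_nhu, mock_nfhu, nc_dataset):
#     ...
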
def save_netcdf_TS(times, Ts, Ss, Ws, sshs, depthst, depthsw, name, lons,
                   lats, to, tf):
    """Save the T, S, w, and ssh time series over the volume into a netCDF file."""
    path = '/ocean/nsoontie/MEOPAR/TidalEllipseData/ModelTimeSeries/'
    fname = '{}_TS_{}_{}.nc'.format(
        name, to.strftime('%Y%m%d'), tf.strftime('%Y%m%d'))
    nc_file = nc.Dataset(os.path.join(path, fname), 'w', zlib=True)
    # dataset attributes
    nc_tools.init_dataset_attrs(
        nc_file,
        title='{} TS, w, ssh'.format(name),
        notebook_name='N/A',
        nc_filepath=os.path.join(path, fname),
        comment='Generated for tidal and energy analysis')
    # dimensions
    nc_file.createDimension('time_counter', None)
    nc_file.createDimension('deptht', Ts.shape[1])
    nc_file.createDimension('y', Ts.shape[2])
    nc_file.createDimension('x', Ts.shape[3])
    nc_file.createDimension('depthw', Ws.shape[1])
    # variables
    # time_counter
    time_counter = nc_file.createVariable(
        'time_counter', 'float64', ('time_counter'))
    time_counter.long_name = 'Time axis'
    time_counter.axis = 'T'
    time_counter.units = 'hour since {}'.format(NodalCorr['reftime'])
    # lat, lon
    lon = nc_file.createVariable('nav_lon', 'float32', ('y', 'x'), zlib=True)
    lon[:] = lons[:]
    lat = nc_file.createVariable('nav_lat', 'float32', ('y', 'x'), zlib=True)
    lat[:] = lats[:]
    # T
    T = nc_file.createVariable(
        'votemper', 'float32', ('time_counter', 'deptht', 'y', 'x'),
        zlib=True)
    T.units = 'deg C'
    T.long_name = 'Temperature'
    T.coordinates = 'time_counter, deptht'
    # S
    S = nc_file.createVariable(
        'vosaline', 'float32', ('time_counter', 'deptht', 'y', 'x'),
        zlib=True)
    S.units = '[psu]'
    S.long_name = 'Practical Salinity'
    S.coordinates = 'time_counter, deptht'
    # W
    W = nc_file.createVariable(
        'vovecrtz', 'f8', ('time_counter', 'depthw', 'y', 'x'),
        zlib=True, least_significant_digit=9)
    W.units = 'm/s'
    W.long_name = 'Vertical Velocity'
    W.coordinates = 'time_counter, depthw'
    # SSH
    SSH = nc_file.createVariable(
        'sossheig', 'float32', ('time_counter', 'y', 'x'), zlib=True)
    SSH.units = 'm'
    SSH.long_name = 'Sea Surface height'
    SSH.coordinates = 'time_counter'
    # deptht
    depth = nc_file.createVariable('deptht', 'float32', ('deptht'), zlib=True)
    depth.units = 'm'
    depth.long_name = 'Depth'
    depth.coordinates = 'deptht'
    # depthw
    depthw = nc_file.createVariable('depthw', 'float32', ('depthw'), zlib=True)
    depthw.units = 'm'
    depthw.long_name = 'Depth'
    depthw.coordinates = 'depthw'
    T[:] = Ts
    S[:] = Ss
    depth[:] = depthst
    depthw[:] = depthsw
    time_counter[:] = times
    SSH[:] = sshs
    W[:] = Ws
    nc_file.close()

        continue

print("Vertical Interpolation to WCVI depth levels successful")
print("Now writing into a binary file to be used as IC for NEMO")

file_temp = nc.Dataset(
    '/ocean/ssahu/CANYONS/wcvi/initial_conditions/West_coast_NEMO_IC_high_resolution_wide_bdy.nc',
    'w', zlib=True)

# dataset attributes
nc_tools.init_dataset_attrs(
    file_temp,
    title='Temperature and salinity Initial Condition',
    notebook_name='Making_IC_from_JP',
    nc_filepath='/ocean/ssahu/CANYONS/wcvi/initial_conditions/West_coast_NEMO_IC_high_resolution.nc',
    comment='Temperature and salinity from JP Model, high_resolution grid; used at all grid points and interpolated vertically')

file_temp.createDimension('xb', votemper_NEMO.shape[2])
file_temp.createDimension('yb', votemper_NEMO.shape[1])
file_temp.createDimension('deptht', votemper_NEMO.shape[0])
file_temp.createDimension('time_counter', None)

nav_lat = file_temp.createVariable('nav_lat', 'float32', ('yb', 'xb'))
nav_lat.long_name = 'Latitude'
nav_lat.units = 'degrees_north'

nav_lon = file_temp.createVariable('nav_lon', 'float32', ('yb', 'xb'))

lons.units = 'degrees_east'
lons.long_name = 'Longitude'
lons.valid_range = (np.min(lons[:]), np.max(lons[:]))

vars = ((src01, wgt01), (src02, wgt02), (src03, wgt03), (src04, wgt04))
for i, sw in enumerate(vars):
    s, w = sw
    sname = 'src{:02d}'.format(i + 1)
    wname = 'wgt{:02d}'.format(i + 1)
    s[:] = src.variables[sname][:]
    s.units = 1
    s.long_name = '{} Grid Index {} (Flattened)'.format(atmos_grid_name, i + 1)
    s.valid_range = np.array(
        (np.min(src.variables[sname]), np.max(src.variables[sname])))
    w[:] = src.variables[wname][:]
    w.units = 1
    w.long_name = 'Salish Sea Grid Weights for {}'.format(sname)
    w.valid_range = np.array(
        (np.min(src.variables[wname]), np.max(src.variables[wname])))

nc_tools.init_dataset_attrs(
    weights,
    'Salish Sea AGRIF NEMO {} Atmospheric Forcing Interpolation Weights'.format(atmos_grid_name),
    [],
    netcdf4_weight,
)
weights.history = history

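# Hedged sketch of the setup the two weights fragments above assume; the
# actual improveweights.py preamble is not included in this collection, so
# the file names, dimension names, and variable dtypes here are assumptions.
# src is the existing weights file being read, weights is the improved file
# being written, and src01..wgt04 are the per-corner index/weight variables.
import netCDF4 as nc
import numpy as np

atmos_grid_name = 'GEM 2.5km'          # placeholder grid label
netcdf4_weight = 'weights-improved.nc' # placeholder output file name
history = 'example improveweights.py run'

src = nc.Dataset('weights-original.nc')
weights = nc.Dataset(netcdf4_weight, 'w')
ny, nx = src.variables['src01'].shape
weights.createDimension('y', ny)
weights.createDimension('x', nx)
lons = weights.createVariable('nav_lon', 'float32', ('y', 'x'))
lats = weights.createVariable('nav_lat', 'float32', ('y', 'x'))
src01 = weights.createVariable('src01', 'int32', ('y', 'x'))
wgt01 = weights.createVariable('wgt01', 'float64', ('y', 'x'))
src02 = weights.createVariable('src02', 'int32', ('y', 'x'))
wgt02 = weights.createVariable('wgt02', 'float64', ('y', 'x'))
src03 = weights.createVariable('src03', 'int32', ('y', 'x'))
wgt03 = weights.createVariable('wgt03', 'float64', ('y', 'x'))
src04 = weights.createVariable('src04', 'int32', ('y', 'x'))
wgt04 = weights.createVariable('wgt04', 'float64', ('y', 'x'))
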