def write_initial_positions(index, drifter, grid):
    bathy, nav_lon, nav_lat = tidetools.get_bathy_data(grid)
    xind, yind = tidetools.find_closest_model_point(
        drifter['lon'][index], drifter['lat'][index],
        nav_lon, nav_lat, bathy, allow_land=True)
    xp, yp = find_start_point(nav_lon, nav_lat, xind, yind, drifter, index)
    initial_conditions = np.ones((81, 5))
    # longitude index
    initial_conditions[0:-1:3, 0] = yp
    initial_conditions[1:-1:3, 0] = yp - 2.5
    initial_conditions[2:81:3, 0] = yp + 2.5
    # latitude index
    for i in range(0, 81, 9):
        initial_conditions[0 + i:3 + i, 1] = xp + 0.5
        initial_conditions[3 + i:6 + i, 1] = xp - 2
        initial_conditions[6 + i:9 + i, 1] = xp + 3
    # depth
    initial_conditions[0:27, 2] = -1.5
    initial_conditions[27:54, 2] = -2.5
    initial_conditions[54:81, 2] = -3.5
    # time
    tp = (drifter['time'][index].hour
          + drifter['time'][index].minute / 60.
          + drifter['time'][index].second / 3600.)
    for i in range(0, 81, 27):
        initial_conditions[0 + i:9 + i, 3] = tp
        initial_conditions[9 + i:18 + i, 3] = tp + 0.5
        initial_conditions[18 + i:27 + i, 3] = tp - 0.5
    np.savetxt('initial_positions.txt', initial_conditions, fmt='%10.5f')
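
# Example usage (a sketch, not part of the original module): seed an Ariane
# run from the first fix of a drifter record.  The bathymetry file name and
# the drifter values below are illustrative assumptions only, and
# find_start_point is the companion helper used above (not shown here).
import datetime
import netCDF4 as nc

grid = nc.Dataset('bathy_meter_SalishSea2.nc')  # assumed local bathymetry file
drifter = {
    'lon': [-123.40],                                    # deployment longitude (made up)
    'lat': [49.20],                                      # deployment latitude (made up)
    'time': [datetime.datetime(2015, 9, 1, 12, 30, 0)],  # deployment time (made up)
}
write_initial_positions(0, drifter, grid)  # writes initial_positions.txt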
def retrieve_hindcast_data(lon, lat, date, obs_depth, field, grid_B, mesh_mask):
    """Gather the nowcast field daily mean, min and max at lat, lon on date,
    interpolated to obs_depth.

    :arg lon: longitude point
    :type lon: real number

    :arg lat: latitude point
    :type lat: real number

    :arg date: simulation date
    :type date: datetime

    :arg obs_depth: array of depths to be interpolated to
    :type obs_depth: numpy array

    :arg field: name of variable to load, e.g. 'vosaline' or 'votemper'
    :type field: string

    :arg grid_B: model bathymetry
    :type grid_B: netCDF4 object

    :arg mesh_mask: model mesh mask
    :type mesh_mask: netCDF4 object

    :returns: model_d_interp, model_max, model_min - numpy arrays
    """
    # look up model grid point
    bathy, lons, lats = tidetools.get_bathy_data(grid_B)
    j, i = geo_tools.find_closest_model_point(lon, lat, lons, lats,
                                              land_mask=bathy.mask)
    # load daily and hourly results
    grid_d = results_dataset2('1d', 'grid_T', date)
    grid_h = results_dataset2('1h', 'grid_T', date)
    model_d = grid_d.variables[field][0, :, j, i]
    model_h = grid_h.variables[field][:, :, j, i]
    if 'gdept' in mesh_mask.variables.keys():
        gdep = mesh_mask.variables['gdept'][0, :, j, i]
    else:
        gdep = mesh_mask.variables['gdept_0'][0, :, j, i]
    # masking
    tmask = mesh_mask.variables['tmask'][:, :, j, i]
    tmask = 1 - tmask + np.zeros(model_h.shape)
    model_d = np.ma.array(model_d, mask=tmask[0, :])
    gdep_mask = np.ma.array(gdep, mask=tmask[0, :])
    model_h = np.ma.array(model_h, mask=tmask)
    # interpolate to observed depths
    model_d_interp = comparisons.interpolate_depth(model_d, gdep_mask, obs_depth)
    model_h_interp = np.zeros((model_h.shape[0], len(obs_depth)))
    for t in np.arange(model_h.shape[0]):
        model_h_interp[t, :] = comparisons.interpolate_depth(
            model_h[t, :], gdep_mask, obs_depth)
    # daily max and min
    model_max = np.max(model_h_interp, axis=0)
    model_min = np.min(model_h_interp, axis=0)
    return model_d_interp, model_max, model_min
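
# Example usage (a sketch, assuming local copies of the bathymetry and mesh
# mask files; the file names, point location and observation depths are
# illustrative assumptions only).
import datetime
import netCDF4 as nc
import numpy as np

grid_B = nc.Dataset('bathy_meter_SalishSea2.nc')   # assumed bathymetry file
mesh_mask = nc.Dataset('mesh_mask_SalishSea2.nc')  # assumed mesh mask file
obs_depth = np.array([1., 5., 10., 50., 100.])     # made-up cast depths [m]
sal_daily, sal_max, sal_min = retrieve_hindcast_data(
    -123.5, 49.0, datetime.datetime(2016, 6, 15), obs_depth,
    'vosaline', grid_B, mesh_mask)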
def get_model_time_series(station, fnames, grid_B, mesh_mask, nemo_36=True):
    """Retrieve the salinity and temperature time series at a station.
    The time series is built from the files listed in fnames."""
    if nemo_36:
        depth_var = 'gdept_0'
        depth_var_w = 'gdepw_0'
    else:
        depth_var = 'gdept'
        depth_var_w = 'gdepw'
    # station info
    lon = places.PLACES[station]['lon lat'][0]
    lat = places.PLACES[station]['lon lat'][1]
    depth = places.PLACES[station]['depth']
    # corresponding model location and variables
    bathy, X, Y = tidetools.get_bathy_data(grid_B)
    j, i = geo_tools.find_closest_model_point(lon, lat, X, Y,
                                              land_mask=bathy.mask)
    model_depths = mesh_mask.variables[depth_var][0, :, j, i]
    tmask = mesh_mask.variables['tmask'][0, :, j, i]
    wdeps = mesh_mask.variables[depth_var_w][0, :, j, i]
    sal, time = analyze.combine_files(fnames, 'vosaline', 'None', j, i)
    temp, time = analyze.combine_files(fnames, 'votemper', 'None', j, i)
    # interpolate to the station depth
    sal_interp = np.array([
        shared.interpolate_tracer_to_depths(
            sal[d, :], model_depths, depth, tmask, wdeps)
        for d in range(sal.shape[0])
    ])
    temp_interp = np.array([
        shared.interpolate_tracer_to_depths(
            temp[d, :], model_depths, depth, tmask, wdeps)
        for d in range(temp.shape[0])
    ])
    return sal_interp, temp_interp, time
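
# Example usage (a sketch): the station key below is an assumption and must
# exist in salishsea_tools.places.PLACES with 'lon lat' and 'depth' entries;
# fnames must be a chronological list of hourly grid_T result files (the glob
# pattern is illustrative only).
import glob
import netCDF4 as nc

grid_B = nc.Dataset('bathy_meter_SalishSea2.nc')   # assumed bathymetry file
mesh_mask = nc.Dataset('mesh_mask_SalishSea2.nc')  # assumed mesh mask file
fnames = sorted(glob.glob('SalishSea_1h_*_grid_T.nc'))  # assumed result files
sal, temp, time = get_model_time_series(
    'Central node', fnames, grid_B, mesh_mask, nemo_36=True)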
import matplotlib.pyplot as plt
import netCDF4 as nc
import numpy as np
import matplotlib.patches as patches

from salishsea_tools import viz_tools, geo_tools, tidetools
from bathy_helpers import *

grid = nc.Dataset(
    '/data/vdo/MEOPAR/NEMO-forcing/grid/bathy_meter_SalishSea2.nc')
result = nc.Dataset(
    '/ocean/vdo/MEOPAR/ariane-runs/monthlong/ariane_trajectories_qualitative.nc')
latt = result.variables['traj_lat']
lont = result.variables['traj_lon']
bathy, lons, lats = tidetools.get_bathy_data(grid)
mask = lont[:].mask

with nc.Dataset('/home/mdunphy/MEOPAR/NEMO-forcing/grid/coordinates_seagrid_SalishSea201702.nc', 'r') as cnc:
    glamf = cnc.variables['glamf'][0, ...]
    gphif = cnc.variables['gphif'][0, ...]
    glamt = cnc.variables['glamt'][0, ...]
    gphit = cnc.variables['gphit'][0, ...]
NY, NX = glamt.shape[0], glamt.shape[1]
glamfe, gphife = expandf(glamf, gphif)


def still_inside(time, number):
    number_of_particles = np.zeros(time)
    for n in range(time):
        for m in range(number):
            if not mask[n, m]:
                y, x = geo_tools.find_closest_model_point(
                    lont[n, m], latt[n, m], lons, lats, land_mask=bathy.mask)
                if (598 < y < 658) and (118 < x < 134):
                    number_of_particles[n] = number_of_particles[n] + 1
    return number_of_particles


def still_inside2(time):
def plot_files(ax, grid_B, files, var, depth, t_orig, t_final, name, label,
               colour):
    """Plot the values of a variable over the period of time covered by
    multiple result files.

    :arg ax: The axis where the variable is plotted.
    :type ax: axis object

    :arg grid_B: Bathymetry dataset for the Salish Sea NEMO model.
    :type grid_B: :class:`netCDF4.Dataset`

    :arg files: Multiple result files in chronological order.
    :type files: list

    :arg var: Name of variable (sossheig = sea surface height,
              vosaline = salinity, votemper = temperature,
              vozocrtx = Velocity U-component,
              vomecrty = Velocity V-component).
    :type var: string

    :arg depth: Depth of model results ('None' if var=sossheig).
    :type depth: integer or string

    :arg t_orig: The beginning of the date range of interest.
    :type t_orig: datetime object

    :arg t_final: The end of the date range of interest.
    :type t_final: datetime object

    :arg name: The name of the station.
    :type name: string

    :arg label: Label for plot line.
    :type label: string

    :arg colour: Colour of plot lines.
    :type colour: string

    :returns: The axis object (ax).
    """
    # Station information
    lat = figures.SITES[name]['lat']
    lon = figures.SITES[name]['lon']
    # Bathymetry
    bathy, X, Y = tidetools.get_bathy_data(grid_B)
    # Model grid index of the station
    [j, i] = geo_tools.find_closest_model_point(lon, lat, X, Y,
                                                land_mask=bathy.mask)
    # Load the variable time series at that point
    var_ary, time = combine_files(files, var, depth, j, i)
    # Plot
    ax.plot(time, var_ary, label=label, color=colour, linewidth=2.5)
    # Figure format
    ax_start = t_orig
    ax_end = t_final + datetime.timedelta(days=1)
    ax.set_xlim(ax_start, ax_end)
    hfmt = mdates.DateFormatter('%m/%d %H:%M')
    ax.xaxis.set_major_formatter(hfmt)
    return ax
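
# Example usage (a sketch): overlay two days of hourly sea surface height at
# one station.  The file names are illustrative assumptions, the station key
# must exist in figures.SITES, and the axis is created here rather than being
# passed in from a larger dashboard figure.
import datetime
import matplotlib.pyplot as plt
import netCDF4 as nc

grid_B = nc.Dataset('bathy_meter_SalishSea2.nc')  # assumed bathymetry file
files = ['SalishSea_1h_20160615_20160615_grid_T.nc',
         'SalishSea_1h_20160616_20160616_grid_T.nc']  # assumed result files
fig, ax = plt.subplots(figsize=(10, 4))
plot_files(ax, grid_B, files, 'sossheig', 'None',
           datetime.datetime(2016, 6, 15), datetime.datetime(2016, 6, 16),
           'Point Atkinson', 'nowcast', 'blue')
ax.legend()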
def get_error_model(names, runs_list, grid_B, t_orig):
    """Set up the calculation of the model residual error.

    :arg names: Names of stations.
    :type names: list of strings

    :arg runs_list: Runs that have been verified as complete.
    :type runs_list: list

    :arg grid_B: Bathymetry dataset for the Salish Sea NEMO model.
    :type grid_B: :class:`netCDF4.Dataset`

    :arg t_orig: Date being considered.
    :type t_orig: datetime object

    :returns: error_mod_dict, t_mod_dict
    """
    bathy, X, Y = tidetools.get_bathy_data(grid_B)
    t_orig_obs = t_orig + datetime.timedelta(days=-1)
    t_final_obs = t_orig + datetime.timedelta(days=1)
    # truncation times
    sdt = t_orig.replace(tzinfo=tz.tzutc())
    edt = sdt + datetime.timedelta(days=1)
    error_mod_dict = {}
    t_mod_dict = {}
    for name in names:
        error_mod_dict[name] = {}
        t_mod_dict[name] = {}
        # Look up model grid point
        lat = figures.SITES[name]['lat']
        lon = figures.SITES[name]['lon']
        j, i = geo_tools.find_closest_model_point(lon, lat, X, Y,
                                                  land_mask=bathy.mask)
        # Observed residuals, water levels and tides
        ttide = figures.shared.get_tides(name, path=paths['tides'])
        res_obs, wlev_meas = obs_residual_ssh(
            name, ttide, t_orig_obs, t_final_obs)
        res_obs_trun, time_obs_trun = analyze.truncate_data(
            np.array(res_obs), np.array(wlev_meas.time), sdt, edt)
        for mode in runs_list:
            filename, run_date = analyze.create_path(
                mode, t_orig, 'SalishSea_1h_*_grid_T.nc')
            grid_T = nc.Dataset(filename)
            res_mod, t_model, ssh_corr, ssh_mod = model_residual_ssh(
                grid_T, j, i, ttide)
            # Truncate
            res_mod_trun, t_mod_trun = analyze.truncate_data(
                res_mod, t_model, sdt, edt)
            # Error
            error_mod = analyze.calculate_error(
                res_mod_trun, t_mod_trun, res_obs_trun, time_obs_trun)
            error_mod_dict[name][mode] = error_mod
            t_mod_dict[name][mode] = t_mod_trun
    return error_mod_dict, t_mod_dict
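
# Example usage (a sketch): residual errors for two stations on one date.
# The station keys must exist in figures.SITES and the run types must be
# valid modes for analyze.create_path; all values below are assumptions.
import datetime
import netCDF4 as nc

grid_B = nc.Dataset('bathy_meter_SalishSea2.nc')  # assumed bathymetry file
error_mod_dict, t_mod_dict = get_error_model(
    ['Point Atkinson', 'Victoria'], ['nowcast', 'forecast'], grid_B,
    datetime.datetime(2016, 6, 15))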
def plot_residual_model(axs, names, runs_list, grid_B, t_orig):
    """Plot the observed sea surface height residual against the modelled
    sea surface height residual (calculate_residual) at the specified
    stations.

    The function may produce none, any, or all (nowcast, forecast,
    forecast 2) of the model residuals, depending on their availability for
    the specified date (runs_list).

    :arg axs: The axes where the residuals are plotted.
    :type axs: list of axes

    :arg names: Names of stations.
    :type names: list of names

    :arg runs_list: Runs that have been verified as complete.
    :type runs_list: list

    :arg grid_B: Bathymetry dataset for the Salish Sea NEMO model.
    :type grid_B: :class:`netCDF4.Dataset`

    :arg t_orig: Date being considered.
    :type t_orig: datetime object
    """
    bathy, X, Y = tidetools.get_bathy_data(grid_B)
    t_orig_obs = t_orig + datetime.timedelta(days=-1)
    t_final_obs = t_orig + datetime.timedelta(days=1)
    # truncation times
    sdt = t_orig.replace(tzinfo=tz.tzutc())
    edt = sdt + datetime.timedelta(days=1)
    for ax, name in zip(axs, names):
        # Identify model grid point
        lat = figures.SITES[name]['lat']
        lon = figures.SITES[name]['lon']
        j, i = geo_tools.find_closest_model_point(lon, lat, X, Y,
                                                  land_mask=bathy.mask)
        # Observed residuals, water levels and tides
        ttide = figures.shared.get_tides(name, path=paths['tides'])
        res_obs, wlev_meas = obs_residual_ssh(
            name, ttide, t_orig_obs, t_final_obs)
        # Truncate and plot observations
        res_obs_trun, time_obs_trun = analyze.truncate_data(
            np.array(res_obs), np.array(wlev_meas.time), sdt, edt)
        ax.plot(time_obs_trun, res_obs_trun, c=colours['observed'],
                lw=2.5, label='observed')
        for mode in runs_list:
            filename, run_date = analyze.create_path(
                mode, t_orig, 'SalishSea_1h_*_grid_T.nc')
            grid_T = nc.Dataset(filename)
            res_mod, t_model, ssh_corr, ssh_mod = model_residual_ssh(
                grid_T, j, i, ttide)
            # Truncate and plot model residuals
            res_mod_trun, t_mod_trun = analyze.truncate_data(
                res_mod, t_model, sdt, edt)
            ax.plot(t_mod_trun, res_mod_trun, label=mode, c=colours[mode],
                    lw=2.5)
        ax.set_title(
            'Comparison of modelled sea surface height residuals at'
            ' {station}: {t:%d-%b-%Y}'.format(station=name, t=t_orig))
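
# Example usage (a sketch): one axis per station, with the same assumptions
# as above about figures.SITES keys and available run types.
import datetime
import matplotlib.pyplot as plt
import netCDF4 as nc

grid_B = nc.Dataset('bathy_meter_SalishSea2.nc')  # assumed bathymetry file
names = ['Point Atkinson', 'Victoria']            # assumed station keys
fig, axs = plt.subplots(len(names), 1, figsize=(10, 8), sharex=True)
plot_residual_model(axs, names, ['nowcast', 'forecast'], grid_B,
                    datetime.datetime(2016, 6, 15))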
def compare_VENUS(station, grid_T, grid_B, figsize=(6, 10)):
    """Compare the model's temperature and salinity with observations from
    a VENUS station.

    :arg station: Name of the station ('East' or 'Central').
    :type station: string

    :arg grid_T: Hourly tracer results dataset from NEMO.
    :type grid_T: :class:`netCDF4.Dataset`

    :arg grid_B: Bathymetry dataset for the Salish Sea NEMO model.
    :type grid_B: :class:`netCDF4.Dataset`

    :arg figsize: Figure size (width, height) in inches.
    :type figsize: 2-tuple

    :returns: matplotlib figure object instance (fig).
    """
    # Time range
    t_orig, t_end, t = figures.get_model_time_variables(grid_T)
    # Bathymetry
    bathy, X, Y = tt.get_bathy_data(grid_B)
    # Figure set-up
    fig, (ax_sal, ax_temp) = plt.subplots(2, 1, figsize=figsize, sharex=True)
    fig.patch.set_facecolor('#2B3E50')
    fig.autofmt_xdate()
    # VENUS station location
    lon = SITES['VENUS'][station]['lon']
    lat = SITES['VENUS'][station]['lat']
    depth = SITES['VENUS'][station]['depth']
    # Plot observations
    plot_VENUS(ax_sal, ax_temp, station, t_orig, t_end)
    # Grid point of VENUS station
    [j, i] = geo_tools.find_closest_model_point(lon, lat, X, Y)
    # Model data
    sal = grid_T.variables['vosaline'][:, :, j, i]
    temp = grid_T.variables['votemper'][:, :, j, i]
    ds = grid_T.variables['deptht']
    # Interpolate model data to the station depth
    salc = []
    tempc = []
    for ind in np.arange(0, sal.shape[0]):
        salc.append(figures.interpolate_depth(sal[ind, :], ds, depth))
        tempc.append(figures.interpolate_depth(temp[ind, :], ds, depth))
    # Plot model data
    ax_sal.plot(t, salc, '-b', label='Model')
    ax_temp.plot(t, tempc, '-b', label='Model')
    # Axis formatting
    ax_sal.set_title(f'VENUS {station} - {t[0].strftime("%d-%b-%Y")}')
    ax_sal.set_ylim([29, 32])
    ax_sal.set_ylabel('Practical Salinity [psu]', **axis_font)
    ax_sal.legend(loc=0)
    ax_temp.set_ylim([7, 13])
    ax_temp.set_xlabel('Time [UTC]', **axis_font)
    ax_temp.set_ylabel('Temperature [deg C]', **axis_font)
    figures.axis_colors(ax_sal, 'gray')
    figures.axis_colors(ax_temp, 'gray')
    # Text box
    ax_temp.text(0.25, -0.3, 'Observations from Ocean Networks Canada',
                 transform=ax_temp.transAxes, color='white')
    return fig
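
# Example usage (a sketch): compare one day of hourly model output with the
# VENUS Central node.  The file names are illustrative assumptions.
import netCDF4 as nc

grid_T = nc.Dataset('SalishSea_1h_20160615_20160615_grid_T.nc')  # assumed hourly tracer file
grid_B = nc.Dataset('bathy_meter_SalishSea2.nc')                 # assumed bathymetry file
fig = compare_VENUS('Central', grid_T, grid_B)
fig.savefig('compare_VENUS_Central.png', facecolor=fig.get_facecolor())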