def neutralDepth(self, s, t, p, debug=False, searchrange=100, depthname=None): at = np.where(np.asarray(self.ipres) == p)[0][0] depths = (np.asarray(self.ipres[:]) + p) / 2.0 selfdensities = gsw.rho(self.isals[:],\ self.itemps[:],\ depths) refdensities = gsw.rho([s]*(len(depths)),\ [t]*(len(depths)),\ depths) Es = selfdensities - refdensities zero_crossings = np.where(np.diff(np.sign(Es)))[0] smallest = np.argmin(np.abs(Es)) if len(zero_crossings) >= 1: if abs(self.ipres[zero_crossings[0]] - self.ipres[zero_crossings[-1]]) > 100: return [np.nan, np.nan, np.nan, np.nan] a = np.asarray(zero_crossings) #print("More than one crossing") return np.mean(self.isals[a]), np.mean(self.itemps[a]), np.mean( np.asarray(self.ipres)[a]), np.mean(self.igamma[a]) else: return [np.nan, np.nan, np.nan, np.nan]
def neutralDepthInSituAnom(self,p2,depth,debug=False,searchrange=100,depthname=None): depth = int(depth) plowerbound = max(self.ipres[0],p2.ipres[0]) pupperbound = min(self.ipres[-1],p2.ipres[-1]) at = np.where(np.asarray(self.ipres) == depth)[0][0] prange = pupperbound - plowerbound p2offset = p2.ipres[0] - plowerbound selfoffset = self.ipres[0] - plowerbound #print(self.ipres,p2.ipres) if self.ipres[-1] < p2.ipres[0] or p2.ipres[-1] <self.ipres[0]: return None if self.ipres[0] < p2.ipres[0] : p2offset = 0 selfoffset = np.where(np.asarray(self.ipres) == p2.ipres[0])[0][0] elif p2.ipres[0] < self.ipres[0]: selfoffset = 0 p2offset = np.where(np.asarray(p2.ipres) == self.ipres[0])[0][0] else: selfoffset = 0 p2offset = 0 if p2.ipres[p2offset] != self.ipres[selfoffset]: print(p2.ipres[p2offset],self.ipres[selfoffset]) depths = np.asarray(p2.ipres[p2offset:p2offset+prange]) p2densities = gsw.rho(p2.isals[p2offset:p2offset+len(depths)],\ p2.itemps[p2offset:p2offset+len(depths)],\ depths) selfdensities = gsw.rho([self.isals[at]]*(len(depths)),\ [self.itemps[at]]*(len(depths)),\ depths) minlen = min(len(p2densities),len(selfdensities)) Es = p2densities[:minlen]-selfdensities[:minlen] if len(Es)<2: return None zero_crossings = np.where(np.diff(np.sign(Es)))[0] smallest = np.argmin(np.abs(Es)) if len(zero_crossings)>=1 : if abs(p2.ipres[zero_crossings[0]] - p2.ipres[zero_crossings[-1]])>100: return None a =np.asarray(p2offset+zero_crossings) sol = np.asarray(p2.ipres)[a] if len(sol) == 1 or sol[-1]-sol[0] < 40: plt.plot(depths,Es) plt.show() p2.neutraldepth[depthname] = np.mean(np.asarray(p2.ipres)[a]) return p2.neutraldepth[depthname] else: print("More than one crossing {}".format(sol[-1]-sol[0] )) return None else: return None
def calculate_density(T, p, C, lat, long): """Calculates density from temp, pressure, conductivity, and lat/long. All parameters and output are float or array-like. :param T: temperature (deg C) :param p: pressure (bar) :param C: conductivity (S/m) :param lat: latitude (decimal deg) :param long: longitude (decimal deg) :return: density (kg/m^3) """ # pressure in dbars = pressure * 10 if type(p) == float: p_dbar = p * 10 else: p_dbar = [pi * 10 for pi in p] # conductivity in mS/cm = conductivity * 10 if type(C) == float: C_mScm = C * 10 else: C_mScm = [Ci * 10 for Ci in C] # calculate SP from conductivity (mS/cm), in-situ temperature (deg C), and gauge pressure (dbar) SP = gsw.SP_from_C(C_mScm, T, p_dbar) # calculate SA from SP (unitless), gauge pressure (dbar), longitude and latitude (decimal degrees) SA = gsw.SA_from_SP(SP, p_dbar, long, lat) # calculate CT from SA (g/kg), in-situ temperature (deg C), and gauge pressure (dbar) CT = gsw.CT_from_t(SA, T, p_dbar) # calculate density return gsw.rho(SA, CT, p_dbar)
def water_properties(sp, t, p, lon=0.0, lat=0.0): """ Calculates seawater density and sound speed using the TEOS-10 equations. Uses the gsw toolbox, which is an implementation of the TEOS-10 equations. Args: :sp: Practical salinity [PSU] :t: Temperature [degC] :p: Pressure [dbar] :lon: Longitude (optional - use named parameters) [decimal degrees] :lat: Latitude (optional - use named parameters) [decimal degrees] Returns: :c: speed of sound in water [m/s] :rho: density of water [kg/m^3] Notes: The accuracy of sound speed and density that we need is such that it doesn't matter what latitude and longitude is used. """ sa = gsw.SA_from_SP(sp, p, lon, lat) ct = gsw.CT_from_t(sa, t, p) c = gsw.sound_speed(sa, ct, p) rho = gsw.rho(sa, ct, p) return c, rho
def SWdensityFromCTD(SA, t, p, potential=False): """Calculate seawater density at CTD depth Args ---- SA: ndarray Absolute salinity, g/kg t: ndarray In-situ temperature (ITS-90), degrees C p: ndarray Sea pressure (absolute pressure minus 10.1325 dbar), dbar Returns ------- rho: ndarray Seawater density, in-situ or potential, kg/m^3 """ import numpy import gsw CT = gsw.CT_from_t(SA, t, p) # Calculate potential density (0 bar) instead of in-situ if potential: p = numpy.zeros(len(SA)) return gsw.rho(SA, CT, p)
def _validate_transforms(self, rdt_in, rdt_out): #passthrus self.assertTrue(np.allclose(rdt_in['time'], rdt_out['time'])) self.assertTrue(np.allclose(rdt_in['lat'], rdt_out['lat'])) self.assertTrue(np.allclose(rdt_in['lon'], rdt_out['lon'])) self.assertTrue(np.allclose(rdt_in['TEMPWAT_L0'], rdt_out['TEMPWAT_L0'])) self.assertTrue(np.allclose(rdt_in['CONDWAT_L0'], rdt_out['CONDWAT_L0'])) self.assertTrue(np.allclose(rdt_in['PRESWAT_L0'], rdt_out['PRESWAT_L0'])) # TEMPWAT_L1 = (TEMPWAT_L0 / 10000) - 10 t1 = (rdt_out['TEMPWAT_L0'] / 10000) - 10 self.assertTrue(np.allclose(rdt_out['TEMPWAT_L1'], t1)) # CONDWAT_L1 = (CONDWAT_L0 / 100000) - 0.5 c1 = (rdt_out['CONDWAT_L0'] / 100000) - 0.5 self.assertTrue(np.allclose(rdt_out['CONDWAT_L1'], c1)) # Equation uses p_range, which is a calibration coefficient - Fixing to 679.34040721 # PRESWAT_L1 = (PRESWAT_L0 * p_range / (0.85 * 65536)) - (0.05 * p_range) p1 = (rdt_out['PRESWAT_L0'] * 679.34040721 / (0.85 * 65536)) - (0.05 * 679.34040721) self.assertTrue(np.allclose(rdt_out['PRESWAT_L1'], p1)) # PRACSAL = gsw.SP_from_C((CONDWAT_L1 * 10), TEMPWAT_L1, PRESWAT_L1) ps = gsw.SP_from_C((rdt_out['CONDWAT_L1'] * 10.), rdt_out['TEMPWAT_L1'], rdt_out['PRESWAT_L1']) self.assertTrue(np.allclose(rdt_out['PRACSAL'], ps)) # absolute_salinity = gsw.SA_from_SP(PRACSAL, PRESWAT_L1, longitude, latitude) # conservative_temperature = gsw.CT_from_t(absolute_salinity, TEMPWAT_L1, PRESWAT_L1) # DENSITY = gsw.rho(absolute_salinity, conservative_temperature, PRESWAT_L1) abs_sal = gsw.SA_from_SP(rdt_out['PRACSAL'], rdt_out['PRESWAT_L1'], rdt_out['lon'], rdt_out['lat']) cons_temp = gsw.CT_from_t(abs_sal, rdt_out['TEMPWAT_L1'], rdt_out['PRESWAT_L1']) rho = gsw.rho(abs_sal, cons_temp, rdt_out['PRESWAT_L1']) self.assertTrue(np.allclose(rdt_out['DENSITY'], rho))
def rho(SP, t, p, lon=-45.401666, lat=-23.817233): """ Calculates in situ density from practical salinity, in situ temperature and pressure (depth). Parameters ---------- SP : array like Salinity (PSS-78) [1e-3] t : array like Temperature (ITS-90) [degC] p : array like Pressure [dbar] lon : array like, float, optional Longitude, decimal degrees east lat : array like, float, optional Latitude, decimal degrees north Returns ------- rho : array like In situ density [kg m-3] """ # Calculates Absolute Salinity from Practical Salinity SA = gsw.SA_from_SP(SP, p, lon, lat) # Calcualtes Conservative Temperature from in situ temperature CT = gsw.CT_from_t(SA, t, p) # Calculates and returns density return gsw.rho(SA, CT, p)
def water_data(self): """Compute averages of the water-quality data.""" # TODO: It is strongly recommended to limit the range of the # data in the netcdf valid_range attribute. # compute variables from RBR pH = simple_despike(self.rbr["ph"]) Sw = simple_despike(self.rbr["salinity"]) Tw = simple_despike(self.rbr["temperature"]) Cw = simple_despike(self.rbr["conductivity"]) depth = simple_despike(self.rbr["depth"]) rhow = gsw.rho(Sw, Tw, depth) dissoxy = simple_despike(self.rbr["dissoxy"]) # save data in the output dictionary list_of_variables = { "pH": "pH", "Sw": "water_salinity", "Tw": "water_temperature", "Cw": "water_conductivity", "rhow": "water_density", "depth": "water_depth", "dissoxy": "dissolved_oxygen", } # for k, v in list_of_variables.items(): self.r[k] = eval(k) # # append to global list of variables self.list_of_variables = { **self.list_of_variables, **list_of_variables }
def rho_sp(cond, temp, pres, lat=46.0, lon=-124.5): """density and salinity from a CTD. Returns in-situ density and practical salinity from conductivity, temperature, pressure, latitude, and longitude as reported from any standard CTD. Usage: Rho, SP = rho_sp(cond, temp, pres, lat, lon) where Rho = in-situ density, [kg/m^3] SP = practical salinity, [pss] cond = conductivity, [mS/cm] temp = temperature, [deg C] pres = pressure, [dbar] lat = latitude, decimal degrees +N lon = longitude, decimal degrees +E """ SP = gsw.SP_from_C(cond, temp, pres) SA = gsw.SA_from_SP(SP, pres, lon, lat) CT = gsw.CT_from_t(SA, temp, pres) Rho = gsw.rho(SA, CT, pres) return Rho, SP
def density(dataset, salinity, temperature, pressure): """Calculate in-situ density. This function calculated in-situ density from absolute salinity and conservative temperature, using the `gsw.rho` function. Returns a new sequence with the data. """ # find sequence for sequence in walk(dataset, SequenceType): break else: raise ConstraintExpressionError( 'Function "bounds" should be used on a Sequence.') selection = sequence[salinity.name, temperature.name, pressure.name] rows = [tuple(row) for row in selection.iterdata()] data = np.rec.fromrecords( rows, names=['salinity', 'temperature', 'pressure']) rho = gsw.rho(data['salinity'], data['temperature'], data['pressure']) out = SequenceType("result") out['rho'] = BaseType("rho", units="kg/m**3") out.data = np.rec.fromrecords(rho.reshape(-1, 1), names=['rho']) return out
def calculate_density(temperature, pressure, salinity, latitude, longitude): """Calculates density given glider practical salinity, pressure, latitude, and longitude using Gibbs gsw SA_from_SP and rho functions. Parameters: temperature (C), pressure (dbar), salinity (psu PSS-78), latitude (decimal degrees), longitude (decimal degrees) Returns: density (kg/m**3), """ correct_sizes = (temperature.size == pressure.size == salinity.size == latitude.size == longitude.size) if correct_sizes is False: raise ValueError('Arguments must all be the same length') with warnings.catch_warnings(): warnings.simplefilter("ignore") absolute_salinity = SA_from_SP(salinity, pressure, longitude, latitude) conservative_temperature = CT_from_t(absolute_salinity, temperature, pressure) density = rho(absolute_salinity, conservative_temperature, pressure) return density
def add_density(netCDFfile): # loads the netcdf file ds = Dataset(netCDFfile, 'a') if 'DENSITY' in list(ds.variables): ds.close() return "file already contains density" # extracts the variables from the netcdf var_temp = ds.variables["TEMP"] var_psal = ds.variables["PSAL"] var_pres = ds.variables["PRES"] var_lon = ds.variables["LONGITUDE"] var_lat = ds.variables["LATITUDE"] # extracts the data from the variables t = var_temp[:] psal = var_psal[:] p = var_pres[:] lon = var_lon[:] lat = var_lat[:] # calculates absolute salinity SA = gsw.SA_from_SP(psal, p, lon, lat) # calculates conservative temperature CT = gsw.CT_from_t(SA, t, p) # calculates density density = gsw.rho(SA, CT, p) # generates a new variable 'DENSITY' in the netcdf ncVarOut = ds.createVariable( "DENSITY", "f4", ("TIME", ), fill_value=np.nan, zlib=True) # fill_value=nan otherwise defaults to max # assigns the calculated densities to the DENSITY variable, sets the units as kg/m^3, and comments on the variable's origin ncVarOut[:] = density ncVarOut.units = "kg/m^3" ncVarOut.long_name = "sea_water_density" ncVarOut.standard_name = "sea_water_density" ncVarOut.valid_max = np.float32( 1100 ) # https://oceanobservatories.org/wp-content/uploads/2015/09/1341-10004_Data_Product_SPEC_GLBLRNG_OOI.pdf ncVarOut.valid_min = np.float32(1000) ncVarOut.comment = "calculated using gsw-python https://teos-10.github.io/GSW-Python/index.html" # update the history attribute try: hist = ds.history + "\n" except AttributeError: hist = "" ds.setncattr( 'history', hist + datetime.utcnow().strftime("%Y-%m-%d") + " : added DENSITY from TEMP, PSAL, PRES, LAT, LON") ds.close()
def comp_rhostar(Si, Ti, lat): pi = gsw.p_from_z(-zref, lat) cs = gsw.sound_speed(Si, Ti, pi) Ri = gsw.rho(Si, Ti, pi) g = gsw.grav(lat, pi[0]) E = np.zeros((len(zref), )) #plt.plot(Ri, -zref) f = interpolate.interp1d(zref, cs) def e(x): return -g / f(x)**2 if True: for k, z in enumerate(zref): if k == 0: r, E[k] = 0., 1. else: #r1,p = integrate.quad(e,zref[k-1],z,epsrel=1e-1) x = np.linspace(zref[k - 1], z, 10) dx = x[1] - x[0] r1 = integrate.trapz(e(x), dx=dx) r += r1 E[k] = np.exp(r) return Ri * E, E
def __init__(self, name=None, units='kg/m^3', temperature=None, salinity=None): if (temperature is None or salinity is None or not isinstance(temperature, TemperatureTS) or not isinstance(salinity, SalinityTS)): raise ValueError('Must provide temperature and salinity ' 'time series Environment objects') if len(temperature.time.time) > len(salinity.time.time): density_times = temperature.time else: density_times = salinity.time dummy_pt = np.array([ [0, 0], ]) import gsw from gnome import constants data = [ gsw.rho(salinity.at(dummy_pt, t), temperature.at(dummy_pt, t, units='C'), constants.atmos_pressure * 0.0001) for t in density_times.time ] TimeseriesData.__init__(self, name, units, time=density_times, data=data)
def __init__(self, name=None, units='kg/m^3', temperature=None, salinity=None): if (temperature is None or salinity is None or not isinstance(temperature, TemperatureTS) or not isinstance(salinity, SalinityTS)): raise ValueError('Must provide temperature and salinity ' 'time series Environment objects') if len(temperature.time.time) > len(salinity.time.time): density_times = temperature.time else: density_times = salinity.time dummy_pt = np.array([[0, 0], ]) import gsw from gnome import constants data = [gsw.rho(salinity.at(dummy_pt, t), temperature.at(dummy_pt, t, units='C'), constants.atmos_pressure * 0.0001) for t in density_times.time] TimeseriesData.__init__(self, name, units, time=density_times, data=data)
def compute_pot_density(prefix, inGridName, inDir): config = MpasAnalysisConfigParser() config.read('mpas_analysis/config.default') outDescriptor = get_comparison_descriptor(config, 'antarctic') outGridName = outDescriptor.meshName description = 'Monthly potential density climatologies from ' \ '2005-2010 average of the Southern Ocean State ' \ 'Estimate (SOSE)' botDescription = 'Monthly potential density climatologies at sea ' \ 'floor from 2005-2010 average from SOSE' for gridName in [inGridName, outGridName]: outFileName = '{}_pot_den_{}.nc'.format(prefix, gridName) TFileName = '{}_pot_temp_{}.nc'.format(prefix, gridName) SFileName = '{}_salinity_{}.nc'.format(prefix, gridName) if not os.path.exists(outFileName): with xarray.open_dataset(TFileName) as dsT: with xarray.open_dataset(SFileName) as dsS: dsPotDensity = dsT.drop(['theta', 'botTheta']) lat, lon, z = xarray.broadcast(dsS.lat, dsS.lon, dsS.z) pressure = gsw.p_from_z(z.values, lat.values) SA = gsw.SA_from_SP(dsS.salinity.values, pressure, lon.values, lat.values) CT = gsw.CT_from_pt(SA, dsT.theta.values) dsPotDensity['potentialDensity'] = (dsS.salinity.dims, gsw.rho(SA, CT, 0.)) dsPotDensity.potentialDensity.attrs['units'] = \ 'kg m$^{-3}$' dsPotDensity.potentialDensity.attrs['description'] = \ description lat, lon, z = xarray.broadcast(dsS.lat, dsS.lon, dsS.zBot) pressure = gsw.p_from_z(z.values, lat.values) SA = gsw.SA_from_SP(dsS.botSalinity.values, pressure, lon.values, lat.values) CT = gsw.CT_from_pt(SA, dsT.botTheta.values) dsPotDensity['botPotentialDensity'] = \ (dsS.botSalinity.dims, gsw.rho(SA, CT, 0.)) dsPotDensity.botPotentialDensity.attrs['units'] = \ 'kg m$^{-3}$' dsPotDensity.botPotentialDensity.attrs['description'] = \ botDescription write_netcdf(dsPotDensity, outFileName)
def test_density_from_depth(self): self.ambient = pyplume.Ambient(self.z_max, self.salinity, self.temperature, depth=self.depth) self.assertEqual( self.ambient.rho.tolist(), gsw.rho(self.salinity, self.temperature, self.pressure).tolist())
def ts(datadir): """ Plot T S for all files """ temp = [] sal = [] depth = [] # pdens = [] for filename in os.listdir(datadir): data = xr.open_dataset(os.path.join(datadir,filename)) for (t,s,d) in zip(data.ptemp_bal.values, data.ab_sal_bal.values, data.DEPTH.values): temp.append(t) sal.append(s) depth.append(d) # pdens.append(gsw.rho(s,t,d)) #save lists as arrays temp=np.asarray(temp) sal=np.asarray(sal) depth=np.asarray(depth) #### Generate density contours smin = np.nanmin(sal) - (0.01 * np.nanmin(sal)) smax = np.nanmax(sal) + (0.01 * np.nanmax(sal)) tmin = np.nanmin(temp) - (0.1 * np.nanmin(temp)) tmax = np.nanmax(temp) + (0.1 * np.nanmax(temp)) # Calculate how many gridcells we need in the x and y dimensions xdim =int(round((smax-smin)/0.1+1,0)) ydim = int(round((tmax-tmin)+1,0)) # Create empty grid of zeros dens = np.zeros((ydim,xdim)) # # Create temp and salt vectors of appropriate dimensions ti = np.linspace(0,ydim-1,ydim)+tmin si = np.linspace(0,xdim-1,xdim)*0.1+smin # print(si) # # Loop to fill in grid with densities for j in range(0,int(ydim)): for i in range(0, int(xdim)): dens[j,i]=gsw.rho(si[i],ti[j],0) dens=dens-1000. # print(dens) fig,ax = plt.subplots(1,1,figsize=(5,5)) ax.scatter(sal,temp,c='k',s=5,marker='o') scatt = ax.scatter(sal,temp,c=depth,s=3,cmap='viridis_r') cb = plt.colorbar(scatt) cb.set_label('Depth (m)') CS = ax.contour(si,ti,dens, linestyles='dashed', colors='Gray',alpha=0.6) ax.clabel(CS, fontsize=12, inline=1, fmt='%1.0f') # Label every second level ax.set_xlabel('Absolute salinity (psu)') ax.set_ylabel(u'Temperature ($^{\circ}$C)') plt.savefig('Figures/ForReport/ts.pdf') plt.savefig('Figures/Raw/ts_conv_baltic.png') plt.savefig('Figures/Raw/ts_conv_baltic.pdf')
def wallPlume(z, y, ambient, z_max, MELT=True): """Solve the equations for a wallPlume wallPlume formulation (halfCone in Cowton et al.) See: Cowton et al. (2015) DOI: 10.1002/2014JC010324 """ # this was a safety check at some point - is it still needed?! if z > z_max: return None # initialise array for output ydot = np.zeros(y.shape) # calculate melt rate if required if MELT: t_b, s_b, mdot = get_melt(y[1], y[2], y[3], ambient.get_pres_z(z)) else: t_b = 0. s_b = 0. mdot = 0. # get ambient conditions at whatever depth we're at t_amb = ambient.get_temp_z(z) s_amb = ambient.get_sal_z(z) rho_a = ambient.get_rho_z(z) # approximate pressure at the current depth in dbar pressure = ambient.get_pres_z(z) # calculate current plume density (needs pressure in decibar) # gives density in kg/m3 rho_p = gsw.rho(y[3], y[2], pressure) # check if Neutral Buoyancy is reached, if so this forces values to be nan if rho_p > rho_a: y[0] = np.nan y[1] = np.nan # Solve the plume equations and store in ydot ydot[0] = (2. * const.E_0 + 4. * mdot / (math.pi * y[1]) - y[0] * const.G * (rho_a - rho_p) / (2. * y[1] * y[1] * const.RHO_REF) + 2. * (const.C_D / math.pi)) ydot[1] = (-2. * const.E_0 * y[1] / y[0] - 4. * mdot / (math.pi * y[0]) + const.G * (rho_a - rho_p) / (y[1] * const.RHO_REF) - 4. * const.C_D * y[1] / (math.pi * y[0])) ydot[2] = (2. * const.E_0 * (t_amb - y[2]) / y[0] + 4. * mdot * (t_b - y[2]) / (math.pi * y[0] * y[1]) - 4. * const.GAM_T * (const.C_D**0.5) * (y[2] - t_b) / (math.pi * y[0])) ydot[3] = (2. * const.E_0 * (s_amb - y[3]) / y[0] + 4. * mdot * (s_b - y[3]) / (math.pi * y[0] * y[1]) - 4. * const.GAM_S * (const.C_D**0.5) * (y[3] - s_b) / (math.pi * y[0])) return ydot
def _get_density(self, salinity, temp): ''' use lru cache so we don't recompute if temp is not changing ''' temp_c = uc.convert('Temperature', self.units['temperature'], 'C', temp) # sea level pressure in decibar - don't expect atmos_pressure to change # also expect constants to have SI units rho = gsw.rho(salinity, temp_c, constants.atmos_pressure * 0.0001) return rho
def TSdiagram(TripNo, Savefig, Figpath, Form): # Get trip data Hydr, Data = TurData(TripNo) # Create variables with user-friendly names temp = Data.temp salt = Data.sal ox = Data.ox #PREPARE DATA MESH FOR DENSITY LINES # Figure out boudaries (mins and maxs) # round down or up to nearest 0.1 decimal smin, smax = varmin(salt), varmax(salt) tmin, tmax = varmin(temp), varmax(temp) # Calculate how many gridcells we need in the x and y dimensions sdim = round((smax - smin) * 10) tdim = round((tmax - tmin) * 100) # Create temp and salt vectors of appropiate dimensions ti = np.linspace(tmin, tmax, num=tdim) si = np.linspace(smin, smax, num=sdim) # Create empty grid of zeros dens = np.zeros((tdim, sdim)) # Loop to fill in grid with densities - 1000 for j in range(0, int(tdim)): for i in range(0, int(sdim)): dens[j, i] = gsw.rho(si[i], ti[j], 0) - 1000 #MAKING FIGURE fig1 = plt.figure() plt.title('Trip number: {}'.format(TripNo), fontsize=14) plt.xlabel('Salinity', fontsize=14) plt.ylabel('Temperature (C)', fontsize=14) #PLOTTING DENSITY LINES C1 = plt.contour(si, ti, dens, linestyles='dashed', colors='k') plt.clabel(C1, fontsize=12, inline=1, fmt='%2.2f') # PLOTTING DATA for StNum in Hydr.index: # Create variables with user-friendly names Temp = temp.loc[StNum] Salt = salt.loc[StNum] Ox = ox.loc[StNum] C2 = plt.scatter(Salt, Temp, c=Ox, s=50, marker='o', cmap='rainbow') cbar = plt.colorbar(C2) cbar.ax.set_ylabel('oxygen') if Savefig: path = Figpath + 'TSdiagram.{}'.format(Form) plt.savefig(path, format=Form, dpi=400, bbox_inches='tight')
def calc_buoyancyflux(ds,xv,yv): import gsw # Calculate buoyancy flux r0 = gsw.rho(ds.variables['vosaline'][0,0,yv,xv],ds.variables['votemper'][0,0,yv,xv],0) alpha = gsw.alpha(ds.variables['vosaline'][0,0,yv,xv],ds.variables['votemper'][0,0,yv,xv],0) beta = gsw.beta(ds.variables['vosaline'][0,0,yv,xv],ds.variables['votemper'][0,0,yv,xv],0) D_T = -(alpha/gsw.cp0)*ds.variables['sohefldo'][0,yv,xv] D_S = r0*beta*ds.variables['vosaline'][0,0,yv,xv]*ds.variables['sowaflup'][0,yv,xv]/1000 return D_T + D_S
def test(self): sal = np.array([0.1, 0.1]) # temp = np.array([4., 21.]) # Celsius pres = np.array([10., 20.]) rho = gsw.rho(sal, temp, pres) print("density", rho) lat = [43.2, 43.2] CT = gsw.CT_from_t(sal, temp, pres) N2, p_mid = gsw.Nsquared(sal, CT, pres, lat=lat) print("N2", N2) print("p_mid", p_mid)
def potentialVorticityAtHautala(self,depth,debug=False,halfdistance=35): index = np.where(np.asarray(self.ipres) == depth)[0] if index >36: index = index[0] densities = gsw.rho(self.isals,\ self.itemps,\ self.ipres[index]) drhodz,notvalue = self.dz(depth,densities,35) pv = -(self.f/notvalue)*drhodz return pv,drhodz return None,None
def test_L2_params(self): self.contexts = _get_pc_dict('tempwat_l1', 'condwat_l1', 'preswat_l1', 'pracsal', 'density') self.value_classes = {} dom_set = SimpleDomainSet((10, )) # Add the callback for retrieving values for n, p in self.contexts.iteritems(): if hasattr(p, '_pval_callback'): p._pval_callback = self._get_param_vals p._ctxt_callback = self._ctxt_callback self.value_classes[n] = get_value_class(p.param_type, dom_set) # Get the L2 data psval = get_value_class(self.contexts['pracsal'].param_type, dom_set) rhoval = get_value_class(self.contexts['density'].param_type, dom_set) # Perform assertions - involves "manual" calculation of values # Get the L0 data needed for validating output latvals = self._get_param_vals('lat', slice(None)) lonvals = self._get_param_vals('lon', slice(None)) # Get the L1 data needed for validating output t1val = get_value_class(self.contexts['tempwat_l1'].param_type, dom_set) c1val = get_value_class(self.contexts['condwat_l1'].param_type, dom_set) p1val = get_value_class(self.contexts['preswat_l1'].param_type, dom_set) # Density & practical salinity calucluated using the Gibbs Seawater library - available via python-gsw project: # https://code.google.com/p/python-gsw/ & http://pypi.python.org/pypi/gsw/3.0.1 # pracsal = gsw.SP_from_C((condwat_l1 * 10), tempwat_l1, preswat_l1) import gsw ps = gsw.SP_from_C((c1val[:] * 10.), t1val[:], p1val[:]) np.testing.assert_allclose(psval[:], ps) # absolute_salinity = gsw.SA_from_SP(pracsal, preswat_l1, longitude, latitude) # conservative_temperature = gsw.CT_from_t(absolute_salinity, tempwat_l1, preswat_l1) # density = gsw.rho(absolute_salinity, conservative_temperature, preswat_l1) abs_sal = gsw.SA_from_SP(psval[:], p1val[:], lonvals, latvals) cons_temp = gsw.CT_from_t(abs_sal, t1val[:], p1val[:]) rho = gsw.rho(abs_sal, cons_temp, p1val[:]) np.testing.assert_allclose(rhoval[:], rho)
def teos10_insitu_dens(t, s, z, lat, lon): """ Computes the insitu density from potential temperature and salinity using the Thermodynamic Equation of Seawater 2010 (TEOS-10; IOC, SCOR and IAPSO, 2010). http://www.teos-10.org/pubs/TEOS-10_Manual.pdf """ depth = np.ones_like(t) * z[None, :, None] lat = np.ones_like(t) * lat[None, None, :] lon = np.ones_like(t) * lon[None, None, :] p = gsw.p_from_z(-depth, lat) SA = gsw.SA_from_SP(s, p, lon, lat) CT = gsw.CT_from_pt(SA, t) rho = gsw.rho(SA, CT, p) return rho
def get_seawater_densities(file_ctd_mat, t, lon, lat, max_depth): import gsw import numpy transects = read_matlab(file_ctd_mat) # Find nearest station nearest_key, nearest_idx, min_dist = find_nearest_station( lon, lat, transects) # Cacluate mean salinity above 18m mean_sal = calc_mean_salinity(transects, nearest_key, nearest_idx, max_depth) SA = numpy.asarray([mean_sal] * len(t)) p = numpy.zeros(len(t)) return gsw.rho(SA, t, p)
def sa_ct_rho_sigmatheta(temperature, salinity, pressure, latitude=50, longitude=-65): """ Get common thermodynamic conversions of T-S data. Parameters ---------- temperature: float or 1D array In situ temperature. salinity: float or 1D array Practical salinity. pressure: float or 1D array Sea pressure (dBar). longitude, latitude: float or 1D array Geographical coordinates. Returns ------- SA: float or 1D array Absolute salinity. CT: float or 1D array Conservative temperature. rho: float or 1D array In situ density. ST: float or 1D array Potential density anomaly. """ # Get absolute salinity SA = gsw.SA_from_SP(salinity, pressure, longitude, latitude) # Get conservative temperature CT = gsw.CT_from_t(SA, temperature, pressure) # Get in situ density rho = gsw.rho(SA, CT, pressure) # Get density anomaly sigma_theta = gsw.density.sigma0(SA, CT) return SA, CT, rho, sigma_theta
def test_L2_params(self): self.contexts = _get_pc_dict('tempwat_l1', 'condwat_l1', 'preswat_l1', 'pracsal', 'density') self.value_classes = {} dom_set = SimpleDomainSet((10,)) # Add the callback for retrieving values for n, p in self.contexts.iteritems(): if hasattr(p, '_pval_callback'): p.param_type.callback = self._get_param_vals p._ctxt_callback = self._ctxt_callback self.value_classes[n] = get_value_class(p.param_type, dom_set) # Get the L2 data psval = get_value_class(self.contexts['pracsal'].param_type, dom_set) rhoval = get_value_class(self.contexts['density'].param_type, dom_set) # Perform assertions - involves "manual" calculation of values # Get the L0 data needed for validating output latvals = self._get_param_vals('lat', slice(None)) lonvals = self._get_param_vals('lon', slice(None)) # Get the L1 data needed for validating output t1val = get_value_class(self.contexts['tempwat_l1'].param_type, dom_set) c1val = get_value_class(self.contexts['condwat_l1'].param_type, dom_set) p1val = get_value_class(self.contexts['preswat_l1'].param_type, dom_set) # Density & practical salinity calucluated using the Gibbs Seawater library - available via python-gsw project: # https://code.google.com/p/python-gsw/ & http://pypi.python.org/pypi/gsw/3.0.1 # pracsal = gsw.SP_from_C((condwat_l1 * 10), tempwat_l1, preswat_l1) import gsw ps = gsw.SP_from_C((c1val[:] * 10.), t1val[:], p1val[:]) np.testing.assert_allclose(psval[:], ps) # absolute_salinity = gsw.SA_from_SP(pracsal, preswat_l1, longitude, latitude) # conservative_temperature = gsw.CT_from_t(absolute_salinity, tempwat_l1, preswat_l1) # density = gsw.rho(absolute_salinity, conservative_temperature, preswat_l1) abs_sal = gsw.SA_from_SP(psval[:], p1val[:], lonvals, latvals) cons_temp = gsw.CT_from_t(abs_sal, t1val[:], p1val[:]) rho = gsw.rho(abs_sal, cons_temp, p1val[:]) np.testing.assert_allclose(rhoval[:], rho)
def SA_CT_plot(SA, CT, p_ref=0, isopycs=5, title_string=''): # if less than two input vars, error: "You need to supply both # Absolute Salinity and Conservative Temperature" # if len(p_ref)>1: error: Multiple reference pressures min_SA_data = np.amin(SA) max_SA_data = np.amax(SA) min_CT_data = np.amin(CT) max_CT_data = np.amax(CT) SA_min = max(0.0, min_SA_data - 0.1 * (max_SA_data - min_SA_data)) SA_max = max_SA_data + 0.1 * (max_SA_data - min_SA_data) SA_axis = np.arange(SA_min, SA_max + (SA_max - SA_min) / 400, (SA_max - SA_min) / 200) CT_freezing = gsw.CT_freezing(SA_axis, p_ref, 0) CT_min = min_CT_data - 0.1 * (max_CT_data - min_CT_data) CT_max = max_CT_data + 0.1 * (max_CT_data - min_CT_data) if CT_min > np.min(CT_freezing): CT_min = min_CT_data - 0.1 * (max_CT_data - min(CT_freezing)) CT_axis = np.arange(CT_min, CT_max + (CT_max - CT_min) / 400, (CT_max - CT_min) / 200) SA_gridded, CT_gridded = np.meshgrid(SA_axis, CT_axis) isopycs_gridded = gsw.rho(SA_gridded, CT_gridded, p_ref) - 1000.0 c1 = plt.contour(SA_gridded, CT_gridded, isopycs_gridded, isopycs, colors='k') plt.clabel(c1, inline=1, fontsize=10) c2 = plt.plot(SA, CT, '.-', linewidth=2, markersize=10) # axis square? plt.axis((SA_min, SA_max, CT_min, CT_max)) plt.xlabel('Absolute Salinity $\it{S}_A$ (g kg$^{-1}$)') plt.ylabel('Conservative Temperature, $\Theta$ ($^\circ$C)') if len(title_string) > 0: plt.title(title_string) else: plt.title('$\it{S}_A$ - $\Theta$ diagram:' + ' p$_{ref}$ = ' + str(p_ref) + ' dbar') plt.plot(SA_axis, CT_freezing, '--')
def get_contour_arrays(x_min, x_max, y_min, y_max): """Calculate how many gridcells we need in the x and y dimensions. Assuming x_key = Salinity and y_key = Temperature """ xdim = int(round((x_max - x_min) / 0.1 + 1, 0)) ydim = int(round((y_max - y_min) / 0.1 + 1, 0)) t_m = np.zeros((ydim, xdim)) s_m = np.zeros((ydim, xdim)) dens = np.zeros((ydim, xdim)) ti = np.linspace(1, ydim - 1, ydim) * 0.1 + y_min si = np.linspace(1, xdim - 1, xdim) * 0.1 + x_min for j in range(0, int(ydim)): for i in range(0, int(xdim)): dens[j, i] = gsw.rho(si[i], ti[j], 0) s_m[j, i] = si[i] t_m[j, i] = ti[j] dens = dens return dens, t_m, s_m
def __init__(self, z_max, salinity, temperature, pressure=None, depth=None): if depth is None and pressure is None: raise ValueError( "Must pass either pressure or depth as an argument to Ambient") elif depth is None and pressure is not None: if min(pressure) != 0.: raise ValueError("Profile must start from the surface for the " \ "interpolation, try copying the minimum depth values to be the " \ "surface values") self.pressure = pressure self.depth = pressure / (1027. * 9.81 * 1.e-4) elif depth is not None and pressure is None: if min(depth) != 0: raise ValueError("Profile must start from the surface for the " \ "interpolation, try copying the minimum depth values to be the " \ "surface values") self.pressure = depth * (1027. * 9.81 * 1.e-4) self.depth = depth else: if np.any(depth != pressure / (1027. * 9.81 * 1.e-4)): print("UserWarning: You specified a pressure and depth profile "\ "which may not match assumptions used elsewhere in this model") self.pressure = pressure self.depth = depth self.z_max = z_max self.z = z_max - self.depth self.salinity = salinity self.temperature = temperature self.rho = gsw.rho(salinity, temperature, self.pressure) # internal functions to return interpolated values at arbitrary depths self.__f_sal_d = interp1d(self.depth, salinity) self.__f_temp_d = interp1d(self.depth, temperature) self.__f_rho_d = interp1d(self.depth, self.rho) self.__f_pres_d = interp1d(self.depth, self.pressure)
def gsw_SA_CT_rho_sigma0(temperature, salinity, pressure, lon=-60, lat=47): """ Get abs. sal., cons. temp, in situ and potential densities. Parameters ---------- temperature: float or array In situ temperature [degreeC]. salinity: float or array Practical salinity [PSU]. pressure: float or array Sea pressure or depth [m or decibars]. lon: float or array Longitude of measurement [degrees+east]. lat: float or array Latitude of measurement [degrees+north]. Returns ------- float or array Absolute salinity. float or array Conservative temperature. float or array In situ density. float or array Potential density. """ # Get absolute salinity SA = gsw.SA_from_SP(salinity, pressure, lon, lat) # Get conservative temperature CT = gsw.CT_from_t(SA, temperature, pressure) # Get in situ density rho = gsw.rho(SA, CT, pressure) # Get density anomaly sigma0 = gsw.density.sigma0(SA, CT) return SA, CT, rho, sigma0
def tef_2d_show(p, s, t, path, filename, plot_dens=True): fs = 20 p = p.transpose() #transpose q from tef_2d output, to be able to plot it if plot_dens: #calculate density lines in plot ydim = np.shape(t)[0] xdim = np.shape(s)[0] dens = np.zeros(shape=(xdim, ydim)) for i in range(0, int(xdim)): #print(i) dens[:, i] = gsw.rho(s[i], t, 0) dens = dens - 1000.0 vmax = np.max(p) #do the plot: fig = plt.figure(figsize=(10, 10)) fig.set_size_inches(10, 10) ax = plt.subplot() p4 = ax.pcolor(s, t, p, cmap='seismic', vmin=-vmax, vmax=vmax) #p4=ax.scatter(ss, tt, c=qq/1000000.0, marker='o', s = 4**2,cmap='seismic',vmin=-vmax,vmax=vmax,edgecolors='face') #levels = np.linspace(-vmax,vmax,64) #p4=ax.contourf(s_new2, t_new2, qv/1000000.0, levels=levels, cmap='seismic',vmin=-vmax,vmax=vmax) CS = ax.contour(s, t, dens, linestyles='dashed', colors='k') plt.clabel(CS, fontsize=12, inline=1, fmt='%1.1f') ax.tick_params('both', colors='black', labelsize=fs) ax.set_xlabel('salinity [g/kg]', fontsize=fs) ax.set_ylabel('temperature [$^\circ$C]', fontsize=fs) ax.yaxis.set_major_formatter(FormatStrFormatter('%.1f')) ax.xaxis.set_major_formatter(FormatStrFormatter('%.1f')) v = np.linspace(-vmax, vmax, 10, endpoint=True) cbar = plt.colorbar(p4, ticks=v, format="%.2f") cbar.set_label('p [m$^3$s$^{-1}$(g/kg)$^{-1}$K$^{-1}$]', fontsize=fs) cbar.ax.tick_params(labelsize=fs) plt.ylim([t[0], t[-1]]) plt.xlim([s[0], s[-1]]) plt.gcf().subplots_adjust(bottom=0.15) print('saving png...') plt.savefig(path + filename, format='png', bbox_inches='tight') plt.close() return ('done')
def SA_CT_plot(SA, CT, p_ref=0, isopycs=5, title_string=''): # if less than two input vars, error: "You need to supply both # Absolute Salinity and Conservative Temperature" # if len(p_ref)>1: error: Multiple reference pressures min_SA_data = np.amin(SA) max_SA_data = np.amax(SA) min_CT_data = np.amin(CT) max_CT_data = np.amax(CT) SA_min = max(0.0, min_SA_data-0.1*(max_SA_data - min_SA_data)) SA_max = max_SA_data + 0.1*(max_SA_data - min_SA_data) SA_axis = np.arange( SA_min, SA_max + (SA_max-SA_min)/400, (SA_max-SA_min)/200) CT_freezing = gsw.CT_freezing(SA_axis, p_ref, 0) CT_min = min_CT_data - 0.1*(max_CT_data - min_CT_data) CT_max = max_CT_data + 0.1*(max_CT_data - min_CT_data) if CT_min > np.min(CT_freezing): CT_min = min_CT_data - 0.1*(max_CT_data - min(CT_freezing)) CT_axis = np.arange( CT_min, CT_max + (CT_max-CT_min)/400, (CT_max-CT_min)/200) SA_gridded, CT_gridded = np.meshgrid(SA_axis, CT_axis) isopycs_gridded = gsw.rho(SA_gridded, CT_gridded,p_ref) - 1000.0 c1 = plt.contour( SA_gridded, CT_gridded, isopycs_gridded, isopycs, colors='k') plt.clabel(c1, inline=1, fontsize=10) c2 = plt.plot(SA, CT, '.-', linewidth=2, markersize=10) # axis square? plt.axis((SA_min, SA_max, CT_min, CT_max)) plt.xlabel('Absolute Salinity $\it{S}_A$ (g kg$^{-1}$)') plt.ylabel('Conservative Temperature, $\Theta$ ($^\circ$C)') if len(title_string) > 0: plt.title(title_string) else: plt.title('$\it{S}_A$ - $\Theta$ diagram:' + ' p$_{ref}$ = ' + str(p_ref) + ' dbar') plt.plot(SA_axis, CT_freezing, '--')
def process_ctd(ctd, lat=0, lon=0): """ Calculate practical-salinity and in-situ density from a raw CTD data-set. :param ctd: raw CTD data-set :type ctd: :class:`pandas.DataFrame` :param lat: data-set latitude in degrees :param lon: data-set longitude in degrees :returns: processed CTD data-set :rtype: :class:`pandas.DataFrame` """ pracsal = gsw.SP_from_C(ctd['condwat'], ctd['tempwat'], ctd['preswat']) sa = gsw.SA_from_SP(pracsal, ctd['preswat'], lon, lat) ct = gsw.CT_from_t(sa, ctd['tempwat'], ctd['preswat']) density = gsw.rho(sa, ct, ctd['preswat']) return pd.DataFrame({'timestamp': ctd['timestamp'], 'pracsal': pracsal, 'tempwat': ctd['tempwat'], 'preswat': ctd['preswat'], 'density': density})
def process_optode(ctd, optode, fc, lat=0, lon=0): """ Calculate dissolved oxygen from processed CTD and raw Optode data. :param ctd: processed CTD data-set :type ctd: :class:`pandas.DataFrame` :param optode: raw Optode data-set :type ctd: :class:`pandas.DataFrame` :param fc: Optode foil calibration coefficients :type fc: numpy array :param lat: data-set latitude in degrees :param lon: data-set longitude in degrees :returns: processed Optode data-set :rtype: :class:`pandas.DataFrame` """ # Interpolate CTD data onto the sample times of # the Optode data d = {} nan = float('NaN') for column in ('pracsal', 'tempwat', 'preswat'): d[column] = np.interp(optode['timestamp'], ctd['timestamp'], ctd[column], left=nan, right=nan) # Mask off any sample points that are outside of the # interpolation range. mask = ~(np.isnan(d['pracsal'])) sa = gsw.SA_from_SP(d['pracsal'][mask], d['preswat'][mask], lon, lat) ct = gsw.CT_from_t(sa, d['tempwat'][mask], d['preswat'][mask]) pdens = gsw.rho(sa, ct, np.zeros(len(sa))) do = dosv(optode['doconcs'][mask], optode['t'][mask], d['pracsal'][mask], d['preswat'][mask], pdens, fc) return pd.DataFrame({'timestamp': optode['timestamp'][mask], 'doxygen': do, 'preswat': d['preswat'][mask]})
def ts(salt, temp, p=0, **kw): """ Plot a Temperature-Salinity diagram (a.k.a TS-diagram). Argument -------- salt : Absolute salinity (in PSU), numpy array or python list temp : Conservative temperaure (in degree C), numpy array or python list p : sea pressure (in dbar),[ i.e. absolute pressure - 10.1325 dbar ] This is scalar value. Options ------- rholevels: numpy.array object or python list. A series of scalar value of density anomaly contours to be displayed on the TS-diagram. Author ------ Eyram K. Apetcho Contact ------- [email protected] Version ------- 0.1 License ------- BSD License, See the license """ salt = np.array(salt) temp = np.array(temp) p = np.array(p) rholevels = kw.pop('rholevels', None) if len(salt.shape) == 2 and len(temp.shape)==2: ms, ns = salt.shape mt, nt = temp.shape if not ((ms == mt) and ( ns == nt)): raise ValueError(''' The first two input must at most 2D arrays of same shape''') salt = salt.reshape((ms*ns, )) temp = temp.reshape((mt*nt, )) elif len(salt.shape) > 2 or len(temp.shape) > 2: raise ValueError('''I don't know how to handle array of more 2 dimensions ''') smin = np.nanmin(salt) - 0.01 * np.nanmin(salt) smax = np.nanmax(salt) + 0.01 * np.nanmax(salt) tmin = np.nanmin(temp) - 0.1 * np.nanmin(temp) tmax = np.nanmax(temp) - 0.1 * np.nanmax(temp) xdim = int(np.round((smax - smin)/0.1 + 1)) ydim = int(np.round((tmax - tmin) + 1)) # Remove NaNs from the input data sets. #n_elts, = salt.shape #new_temp= np.nan * np.zeros(temp.shape) #new_salt= np.nan * np.zeros(salt.shape) #for i in range(n_elts): rho = np.zeros((ydim, xdim)) tempi= np.linspace(0, ydim-1, ydim) + tmin salti= np.linspace(0, xdim-1, xdim)*0.1 + smin x , y = np.meshgrid( salti, tempi) rho = gsw.rho(x, y, p*np.ones(x.shape)) - 1000 if rholevels is None: cs = plt.contour(x, y, rho, colors='k',linestyles='dashed', linewidths=2) else: rholevels = np.array(rholelvels) cs = plt.contour(x, y, rho, rholevels, colors='k', linestyles='dashed', linewidths=2) plt.clabel(cs, inline=True, colors='b', fmt='%.2f') plt.xlabel(r' Salinity $(PSU)$') plt.ylabel(r' Temperature $ (^\circ C) $') plt.hold(True) plt.plot(salt, temp, 'or', markersize=9)
def plot_do(files, strait='DS', maxyear=None, savefig=False, figname=None): files = sorted(files) # make sure all files exist for fname in files: if not os.path.isfile(fname): raise IOError('File not found: ' + fname) # keyword arguments for reading the files kwargs = dict(key='df') if maxyear: kwargs['where'] = 'ModelYear<={}'.format(maxyear) fig,axx = plt.subplots( nrows=len(fieldsets), ncols=len(fieldsets[0]), sharex='all', sharey='row', figsize=(16,9)) spt = fig.suptitle('Strait: {}'.format(strait)) cases = [os.path.basename(fname).split('.do.h5')[0] for fname in files] # loop through files for nf, fname in enumerate(files): label = cases[nf] df = pd.read_hdf(fname, **kwargs) df = df.loc[strait] for i, fields in enumerate(fieldsets): for j, varn in enumerate(fields): ax = axx[i,j] ax.set_title(varn) if varn.startswith('rho_'): # compute density from T,S salt = rolling_mean(df[varn.replace('rho_','S')],365).values temp = rolling_mean(df[varn.replace('rho_','T')],365).values series = gsw.rho(salt, temp, 0)-1e3 else: # get series directly from file series = rolling_mean(df[varn],365).values x = df.index.get_level_values('ModelYear') ax.plot(x, series, label=label) # only once if nf == 0: # plot observations try: values = observations[strait][varn] try: obs_handle = ax.axhspan(values[0], values[1], **obs_props) except TypeError: ax.axhline(values, color='0.6666', linewidth=2) except KeyError: pass # legend with patch for observations handles, labels = axx.flat[0].get_legend_handles_labels() obs_handle = mpatches.Patch(**obs_props) handles += [obs_handle] labels += ['observations'] fig.subplots_adjust(right=0.8) lgd = fig.legend(handles, labels, bbox_to_anchor=(0.82,0.5), loc='center left', bbox_transform=fig.transFigure) # save figure to file if savefig or figname: figname = figname or 'ovf_props_{}_{}.pdf'.format(strait,'_'.join(cases)) fig.savefig(figname, bbox_extra_artists=(lgd,spt,), bbox_inches='tight') else: plt.show()
S2 = 32.0 p0 = 0 # dbar pressure at surface lat = 45 # N lon = -30 # E # First convert the measurments to absolute salinity Sa1 = sw.SA_from_SP(S1,p0,lon,lat) Sa2 = sw.SA_from_SP(S2,p0,lon,lat) # ...and conservative temperature. Tc1 = sw.CT_from_t(Sa1,T1,p0) Tc2 = sw.CT_from_t(Sa2,T2,p0) # Now calculate the density of each water parcel? rho1 = sw.rho(Sa1,T1,p0) rho2 = sw.rho(Sa2,T2,p0) #Which water mass is denser? print"The measurement 1 is", round(rho1-rho2,SF),"kg/m^2 denser than measurement 2." #What is their average density? print"Their average density is",round((rho1+rho2)/2,SF),"." # Now allow the two water masses to mix. When they mix, they homogenize their conservative temperature and absolute salinity. T3 = (T1+T2)/2 Sa3 = (Sa1+Sa2)/2 # What is the density of the new water mass? rho3 = sw.rho(Sa3,T3,p0) #sw.rho_CT() doesn't work, so I used rho(Sa,t,p) print"The density of the new water mass is",round(rho3,SF),"." print"The density of the new water mass is > rho1("+str(round(rho1,SF))+") and < rho2("+str(round(rho2,SF))+"), and",\ round(rho3-(rho1+rho2)/2,SF),"kg/m^2 denser than the average of the two water masses."
def mld(S,thetao,depth_cube,latitude_deg): """Compute the mixed layer depth. Parameters ---------- SA : array_like Absolute Salinity [g/kg] CT : array_like Conservative Temperature [:math:`^\circ` C (ITS-90)] p : array_like sea pressure [dbar] criterion : str, optional MLD Criteria Mixed layer depth criteria are: 'temperature' : Computed based on constant temperature difference criterion, CT(0) - T[mld] = 0.5 degree C. 'density' : computed based on the constant potential density difference criterion, pd[0] - pd[mld] = 0.125 in sigma units. `pdvar` : computed based on variable potential density criterion pd[0] - pd[mld] = var(T[0], S[0]), where var is a variable potential density difference which corresponds to constant temperature difference of 0.5 degree C. Returns ------- MLD : array_like Mixed layer depth idx_mld : bool array Boolean array in the shape of p with MLD index. Examples -------- >>> import os >>> import gsw >>> import matplotlib.pyplot as plt >>> from oceans import mld >>> from gsw.utilities import Bunch >>> # Read data file with check value profiles >>> datadir = os.path.join(os.path.dirname(gsw.utilities.__file__), 'data') >>> cv = Bunch(np.load(os.path.join(datadir, 'gsw_cv_v3_0.npz'))) >>> SA, CT, p = (cv.SA_chck_cast[:, 0], cv.CT_chck_cast[:, 0], ... cv.p_chck_cast[:, 0]) >>> fig, (ax0, ax1, ax2) = plt.subplots(nrows=1, ncols=3, sharey=True) >>> l0 = ax0.plot(CT, -p, 'b.-') >>> MDL, idx = mld(SA, CT, p, criterion='temperature') >>> l1 = ax0.plot(CT[idx], -p[idx], 'ro') >>> l2 = ax1.plot(CT, -p, 'b.-') >>> MDL, idx = mld(SA, CT, p, criterion='density') >>> l3 = ax1.plot(CT[idx], -p[idx], 'ro') >>> l4 = ax2.plot(CT, -p, 'b.-') >>> MDL, idx = mld(SA, CT, p, criterion='pdvar') >>> l5 = ax2.plot(CT[idx], -p[idx], 'ro') >>> _ = ax2.set_ylim(-500, 0) References ---------- .. [1] Monterey, G., and S. Levitus, 1997: Seasonal variability of mixed layer depth for the World Ocean. NOAA Atlas, NESDIS 14, 100 pp. Washington, D.C. """ #depth_cube.data = np.ma.masked_array(np.swapaxes(np.tile(depths,[360,180,1]),0,2)) MLD_out = S.extract(iris.Constraint(depth = np.min(depth_cube.data))) MLD_out_data = MLD_out.data for i in range(np.shape(MLD_out)[0]): print'calculating mixed layer for year: ',i thetao_tmp = thetao[i] S_tmp = S[i] depth_cube.data = np.abs(depth_cube.data) depth_cube = depth_cube * (-1.0) p = gsw.p_from_z(depth_cube.data,latitude_deg.data) # dbar SA = S_tmp.data*1.004715 CT = gsw.CT_from_pt(SA,thetao_tmp.data - 273.15) SA, CT, p = map(np.asanyarray, (SA, CT, p)) SA, CT, p = np.broadcast_arrays(SA, CT, p) SA, CT, p = map(ma.masked_invalid, (SA, CT, p)) p_min, idx = p.min(axis = 0), p.argmin(axis = 0) sigma = SA.copy() to_mask = np.where(sigma == S.data.fill_value) sigma = gsw.rho(SA, CT, p_min) - 1000. sigma[to_mask] = np.NAN sig_diff = sigma[0,:,:].copy() sig_diff += 0.125 # Levitus (1982) density criteria sig_diff = np.tile(sig_diff,[np.shape(sigma)[0],1,1]) idx_mld = sigma <= sig_diff #NEED TO SORT THS PIT - COMPARE WWITH OTHER AND FIX!!!!!!!!!! MLD = ma.masked_all_like(S_tmp.data) MLD[idx_mld] = depth_cube.data[idx_mld] * -1 MLD_out_data[i,:,:] = np.ma.max(MLD,axis=0) return MLD_out_data
lat = -40 # Convert depth to pressure p = gsw.p_from_z(-depth, lat) # Convert practical salinity to absolute salinity SA_CTRL = gsw.SA_from_SP(S_CTRL, p, lon, lat) SA_A1B = gsw.SA_from_SP(S_A1B, p, lon, lat) # Convert in-situ temperature to conservative temperature TC_CTRL = gsw.CT_from_t(SA_CTRL, T_CTRL, p) TC_A1B = gsw.CT_from_t(SA_A1B, T_A1B, p) # Calculate density on a T-S grid T_grid, S_grid = np.meshgrid(np.arange(0,35,0.05), np.arange(33,37,0.001)) rho = gsw.rho(S_grid, T_grid, 0) # SHOULD calc pot. dens. NOT in-situ density assuming no depth # Plot plt.figure() CS = plt.contour(S_grid, T_grid, rho, levels=np.arange(1018,1032,1), colors='k') plt.plot(SA_CTRL, TC_CTRL, 'k-', marker='o', markeredgecolor='k', linewidth=2) plt.plot(SA_A1B, TC_A1B, 'r-', marker='o', markeredgecolor='r', linewidth=2) plt.xlim(34.4,36.0) plt.ylim(0,22) plt.grid() plt.clabel(CS, inline=1, fontsize=10, manual=True) # ESC to end selection plt.xlabel('Absolute salinity [g/kg]') plt.ylabel(r'Conservative Temperature [$^\circ$C]')
def convert_to_mll(o2, s, t, p): '''Convert dissolved oxygen concentration from um/kg to ml/l. ''' return gsw.rho(s, t, p) * o2 / 44.66 / 1000.0
def post_process(self, verbose=True): print("\nPost processing") print("---------------\n") # Very basic self.ascent = self.hpid % 2 == 0 self.ascent_ctd = self.ascent*np.ones_like(self.UTC, dtype=int) self.ascent_ef = self.ascent*np.ones_like(self.UTCef, dtype=int) # Estimate number of observations. self.nobs_ctd = np.sum(~np.isnan(self.UTC), axis=0) self.nobs_ef = np.sum(~np.isnan(self.UTCef), axis=0) # Figure out some useful times. self.UTC_start = self.UTC[0, :] self.UTC_end = np.nanmax(self.UTC, axis=0) if verbose: print("Creating time variable dUTC with units of seconds.") self.dUTC = (self.UTC - self.UTC_start)*86400 self.dUTCef = (self.UTCef - self.UTC_start)*86400 if verbose: print("Interpolated GPS positions to starts and ends of profiles.") # GPS interpolation to the start and end time of each half profile. idxs = ~np.isnan(self.lon_gps) & ~np.isnan(self.lat_gps) self.lon_start = np.interp(self.UTC_start, self.utc_gps[idxs], self.lon_gps[idxs]) self.lat_start = np.interp(self.UTC_start, self.utc_gps[idxs], self.lat_gps[idxs]) self.lon_end = np.interp(self.UTC_end, self.utc_gps[idxs], self.lon_gps[idxs]) self.lat_end = np.interp(self.UTC_end, self.utc_gps[idxs], self.lat_gps[idxs]) if verbose: print("Calculating heights.") # Depth. self.z = gsw.z_from_p(self.P, self.lat_start) # self.z_ca = gsw.z_from_p(self.P_ca, self.lat_start) self.zef = gsw.z_from_p(self.Pef, self.lat_start) if verbose: print("Calculating distance along trajectory.") # Distance along track from first half profile. self.__ddist = utils.lldist(self.lon_start, self.lat_start) self.dist = np.hstack((0., np.cumsum(self.__ddist))) if verbose: print("Interpolating distance to measurements.") # Distances, velocities and speeds of each half profile. self.profile_ddist = np.zeros_like(self.lon_start) self.profile_dt = np.zeros_like(self.lon_start) self.profile_bearing = np.zeros_like(self.lon_start) lons = np.zeros((len(self.lon_start), 2)) lats = lons.copy() times = lons.copy() lons[:, 0], lons[:, 1] = self.lon_start, self.lon_end lats[:, 0], lats[:, 1] = self.lat_start, self.lat_end times[:, 0], times[:, 1] = self.UTC_start, self.UTC_end self.dist_ctd = self.UTC.copy() nans = np.isnan(self.dist_ctd) for i, (lon, lat, time) in enumerate(zip(lons, lats, times)): self.profile_ddist[i] = utils.lldist(lon, lat) # Convert time from days to seconds. self.profile_dt[i] = np.diff(time)*86400. d = np.array([self.dist[i], self.dist[i] + self.profile_ddist[i]]) idxs = ~nans[:, i] self.dist_ctd[idxs, i] = np.interp(self.UTC[idxs, i], time, d) self.dist_ef = self.__regrid('ctd', 'ef', self.dist_ctd) if verbose: print("Estimating bearings.") # Pythagorian approximation (?) of bearing. self.profile_bearing = np.arctan2(self.lon_end - self.lon_start, self.lat_end - self.lat_start) if verbose: print("Calculating sub-surface velocity.") # Convert to m s-1 calculate meridional and zonal velocities. self.sub_surf_speed = self.profile_ddist*1000./self.profile_dt self.sub_surf_u = self.sub_surf_speed*np.sin(self.profile_bearing) self.sub_surf_v = self.sub_surf_speed*np.cos(self.profile_bearing) if verbose: print("Interpolating missing velocity values.") # Fill missing U, V values using linear interpolation otherwise we # run into difficulties using cumtrapz next. self.U = self.__fill_missing(self.U) self.V = self.__fill_missing(self.V) # Absolute velocity self.calculate_absolute_velocity(verbose=verbose) if verbose: print("Calculating thermodynamic variables.") # Derive some important thermodynamics variables. # Absolute salinity. 
self.SA = gsw.SA_from_SP(self.S, self.P, self.lon_start, self.lat_start) # Conservative temperature. self.CT = gsw.CT_from_t(self.SA, self.T, self.P) # Potential temperature with respect to 0 dbar. self.PT = gsw.pt_from_CT(self.SA, self.CT) # In-situ density. self.rho = gsw.rho(self.SA, self.CT, self.P) # Potential density with respect to 1000 dbar. self.rho_1 = gsw.pot_rho_t_exact(self.SA, self.T, self.P, p_ref=1000.) # Buoyancy frequency regridded onto ctd grid. N2_ca, __ = gsw.Nsquared(self.SA, self.CT, self.P, self.lat_start) self.N2 = self.__regrid('ctd_ca', 'ctd', N2_ca) if verbose: print("Calculating float vertical velocity.") # Vertical velocity regridded onto ctd grid. dt = 86400.*np.diff(self.UTC, axis=0) # [s] Wz_ca = np.diff(self.z, axis=0)/dt self.Wz = self.__regrid('ctd_ca', 'ctd', Wz_ca) if verbose: print("Renaming Wp to Wpef.") # Vertical water velocity. self.Wpef = self.Wp.copy() del self.Wp if verbose: print("Calculating shear.") # Shear calculations. dUdz_ca = np.diff(self.U, axis=0)/np.diff(self.zef, axis=0) dVdz_ca = np.diff(self.V, axis=0)/np.diff(self.zef, axis=0) self.dUdz = self.__regrid('ef_ca', 'ef', dUdz_ca) self.dVdz = self.__regrid('ef_ca', 'ef', dVdz_ca) if verbose: print("Calculating Richardson number.") N2ef = self.__regrid('ctd', 'ef', self.N2) self.Ri = N2ef/(self.dUdz**2 + self.dVdz**2) if verbose: print("Regridding piston position to ctd.\n") # Regrid piston position. self.ppos = self.__regrid('ctd_ca', 'ctd', self.ppos_ca) self.update_profiles()
def read_tabs(table, buoy, dstart, dend): '''Read in TABS data from mysql. Also process variables as needed. Time from database is in UTC. dstart, dend are datetime objects.''' engine = tools.setup_engine() query = tools.query_setup(engine, buoy, table, dstart.strftime("%Y-%m-%d"), dend.strftime("%Y-%m-%d %H:%M")) df = pd.read_sql_query(query, engine, index_col=['obs_time']) engine.dispose() df.drop(df.index[df.index.isnull()], inplace=True) # drop bad rows df[(df == -99.0) | (df == -999.0) | (df == -999.00)] = np.nan # replace missing values if 'date' in df.keys(): df.drop(['date', 'time'], inplace=True, axis=1) for key in df.keys(): if (df[key]==0).all(): df.loc[:, key] = np.nan # if more than a quarter of the entries are 0, must be wrong elif (df[key][1::2]==0).sum() > len(df)/4: df.loc[1::2, key] = np.nan elif (df[key][::2]==0).sum() > len(df)/4: df.loc[::2, key] = np.nan if table == 'ven': ind = df.tx.isnull() df.drop(df.index[ind], inplace=True) # drop bad rows names = ['East [cm/s]', 'North [cm/s]', 'Dir [deg T]', 'WaterT [deg C]', 'Tx', 'Ty', 'Speed [cm/s]', 'Across [cm/s]', 'Along [cm/s]'] # df.columns = names df['Speed [cm/s]'] = np.sqrt(df['veast']**2 + df['vnorth']**2) df['Speed [cm/s]'] = df['Speed [cm/s]'].round(2) # Calculate along- and across-shelf # along-shelf rotation angle in math angle convention theta = np.deg2rad(-(bys[buoy]['angle']-90)) # convert from compass to math angle df['Across [cm/s]'] = df['veast']*np.cos(-theta) - df['vnorth']*np.sin(-theta) df['Along [cm/s]'] = df['veast']*np.sin(-theta) + df['vnorth']*np.cos(-theta) # dictionary for rounding decimal places rdict = {'Speed [cm/s]': 2, 'Across [cm/s]': 2, 'Along [cm/s]': 2, 'Dir [deg T]': 0} elif table == 'eng': names = ['VBatt [Oper]', 'SigStr [dB]', 'Comp [deg M]', 'Nping', 'Tx', 'Ty', 'ADCP Volt', 'ADCP Curr', 'VBatt [sleep]'] rdict = {} elif table == 'met': names = ['East [m/s]', 'North [m/s]', 'AirT [deg C]', 'AtmPr [mb]', 'Gust [m/s]', 'Comp [deg M]', 'Tx', 'Ty', 'PAR ', 'RelH [%]', 'Speed [m/s]', 'Dir from [deg T]'] df['Speed [m/s]'] = np.sqrt(df['veast']**2 + df['vnorth']**2) df['Dir from [deg T]'] = 90 - np.rad2deg(np.arctan2(-df['vnorth'], -df['veast'])) rdict = {'Speed [m/s]': 2, 'Dir from [deg T]': 0} elif table == 'salt': names = ['WaterT [deg C]', 'Cond [ms/cm]', 'Salinity', 'Density [kg/m^3]', 'SoundVel [m/s]'] rdict = {} # density is all 0s, so need to overwrite df['density'] = gsw.rho(df['salinity'], df['twater'], np.zeros(len(df))) elif table == 'wave': names = ['WaveHeight [m]', 'MeanPeriod [s]', 'PeakPeriod [s]'] rdict = {} df.columns = names df.index.name = 'Dates [UTC]' df = df.round(rdict) return df
def read_model(buoy, which, dstart, dend, timing='recent', units='Metric',
               tz='utc', s_rho=-1):
    '''Read in model output. dstart and dend are datetime objects.

    s_rho (-1, surface) is the index of model output depth: -1 for the surface,
    a number between 0 and 29 for other depth levels, and -999 for all depths.
    '''

    # separate out which model type we want
    # links in list are in the order they are tried by the system
    if timing == 'hindcast':
        locs = ['http://barataria.tamu.edu:8080/thredds/dodsC/NcML/txla_hindcast_sta',
                'http://copano.tamu.edu:8080/thredds/dodsC/NcML/txla_hindcast_sta',
                'http://terrebonne.tamu.edu:8080/thredds/dodsC/NcML/txla_hindcast_sta_agg',
                'http://barataria.tamu.edu:6060/thredds/dodsC/NcML/txla_hindcast_sta']
    elif timing == 'recent':
        locs = ['http://terrebonne.tamu.edu:8080/thredds/dodsC/NcML/forecast_stn_archive_agg.nc',
                'http://copano.tamu.edu:8080/thredds/dodsC/NcML/forecast_stn_archive_agg.nc',
                'http://barataria.tamu.edu:8080/thredds/dodsC/NcML/forecast_stn_archive_agg.nc',
                'http://barataria.tamu.edu:6060/thredds/dodsC/NcML/forecast_stn_archive_agg.nc']
    elif timing == 'forecast':
        locs = ['http://terrebonne.tamu.edu:8080/thredds/dodsC/forecast_latest/txla2_stn_f_latest.nc',
                'http://copano.tamu.edu:8080/thredds/dodsC/forecast_latest/txla2_stn_f_latest.nc',
                'http://barataria.tamu.edu:8080/thredds/dodsC/forecast_latest/txla2_stn_f_latest.nc',
                'http://barataria.tamu.edu:6060/thredds/dodsC/forecast_latest/txla2_stn_f_latest.nc']

    varstot = ['u', 'v', 'temp', 'salt', 'dye_01', 'dye_02', 'dye_03', 'dye_04',
               'Uwind', 'Vwind', 'Pair', 'Tair', 'Qair', 'zeta', 'shflux',
               'sustr', 'svstr']

    # Try different locations for model output. If none work, give up.
    # Loop over station files first since faster if they can be used, then regular files.
    ibuoy = bp.station(buoy)  # get location in stations file for buoy
    for i, loc in enumerate(locs):
        try:
            ds = xr.open_dataset(loc)
            # make sure all variables are present
            assert np.asarray([var in ds for var in varstot]).all()
            break
        except KeyError as e:
            logging.exception(e)
            if i < len(locs) - 1:  # in case there is another option to try
                logging.warning('For model timing %s and buoy %s, station file loc %s did not work due to a KeyError. Trying with loc %s instead...' % (timing, buoy, loc, locs[i+1]))
            else:  # no more options to try
                logging.warning('For model timing %s and buoy %s, station file loc %s did not work due to a KeyError. No more options.' % (timing, buoy, loc))
                ds = None
        except RuntimeError as e:
            logging.exception(e)
            if i < len(locs) - 1:  # in case there is another option to try
                logging.warning('For model timing %s and buoy %s, loc %s did not work due to a RuntimeError. Trying with loc %s instead...' % (timing, buoy, loc, locs[i+1]))
            else:  # no more options to try
                logging.warning('For model timing %s and buoy %s, loc %s did not work due to a RuntimeError. No more options.' % (timing, buoy, loc))
                ds = None
        except IOError as e:  # if link tried is not working
            logging.exception(e)
            if i < len(locs) - 1:  # in case there is another option to try
                logging.warning('For model timing %s and buoy %s, loc %s did not work due to an IOError. Trying with loc %s instead...' % (timing, buoy, loc, locs[i+1]))
            else:  # no more options to try
                logging.warning('For model timing %s and buoy %s, loc %s did not work due to an IOError. No more options.' % (timing, buoy, loc))
                ds = None
        except Exception as e:
            logging.exception(e)
            if i < len(locs) - 1:  # in case there is another option to try
                logging.warning('For model timing %s and buoy %s, loc %s did not work with an unexpected exception. Trying with loc %s instead...' % (timing, buoy, loc, locs[i+1]))
            else:  # no more options to try
                logging.warning('For model timing %s and buoy %s, an unexpected exception occurred. No more options.' % (timing, buoy))
                ds = None

    # Only return data if the requested and modeled time periods overlap:
    # check if the last requested datetime is before the 1st model datetime, or
    # the first requested datetime is after the last model time.
    # (The time coordinate is sometimes called ocean_time and sometimes time.)
    if ds is None or dend <= pd.Timestamp(ds['ocean_time'].isel(ocean_time=0).data, tz='utc') or \
       dstart >= pd.Timestamp(ds['ocean_time'].isel(ocean_time=-1).data, tz='utc'):
        df = None
        return df
    else:
        vars = ['u', 'v', 'temp', 'salt', 'dye_01', 'dye_02', 'dye_03', 'dye_04']
        varnames = ['Along [cm/s]', 'Across [cm/s]', 'WaterT [deg C]', 'Salinity',
                    'Dissolved oxygen concentration [uM]', 'Mississippi passive tracer',
                    'Atchafalaya passive tracer', 'Brazos passive tracer']
        vars_w = ['w']  # on vertical w grid
        varnames_w = ['Vertical velocity [m/s]']

        if s_rho == -999:  # all depths at once
            # don't add 2d variables if all depths requested
            # need to deal separately with s_rho and s_w grids
            df = ds[vars].sel(ocean_time=slice(dstart, dend)).isel(station=ibuoy).to_dataframe()
            # this brings in all times but cannot easily separate times. Just average.
            zr = octant.roms.nc_depths(netCDF.Dataset(loc), 'rho').get_station_depths().mean(axis=0)[:, ibuoy]
            df = df.reset_index(['s_rho'])
            df['s_rho'] = np.tile(zr, int(len(df)/zr.size))

            df2 = ds[vars_w].sel(ocean_time=slice(dstart, dend)).isel(station=ibuoy).to_dataframe()
            # this brings in all times but cannot easily separate times. Just average.
            zw = octant.roms.nc_depths(netCDF.Dataset(loc), 'w').get_station_depths().mean(axis=0)[:, ibuoy]
            df2 = df2.reset_index(['s_w'])
            df2['s_w'] = np.tile(zw, int(len(df2)/zw.size))
            # df2.rename(columns={'s_w': 'Depth [m]'}, inplace=True)
            df2.drop(['lon_rho', 'lat_rho', 's_w'], axis=1, inplace=True, errors='ignore')

            # interpolate w to the rho vertical grid
            df['w'] = np.nan
            ii = 0
            for i in range(0, int(len(df2)/31), 31):  # i is for df2, ii is for df
                df['w'].iloc[ii*30:ii*30+30] = (df2['w'][i:i+30] + df2['w'][i+1:i+31])/2
                ii += 1
        else:
            if s_rho in [-1, 29]:  # surface, more variables
                vars += ['Uwind', 'Vwind', 'Pair', 'Tair', 'Qair', 'zeta', 'shflux',
                         'sustr', 'svstr']
                varnames += ['East [m/s]', 'North [m/s]', 'AtmPr [mb]', 'AirT [deg C]',
                             'RelH [%]', 'Free surface [m]', 'Surface net heat flux [W/m^2]',
                             'Surface u-momentum stress [N/m^2]', 'Surface v-momentum stress [N/m^2]']
            df = ds[vars].sel(ocean_time=slice(dstart, dend)).isel(station=ibuoy, s_rho=s_rho).to_dataframe()
            # this brings in all times but cannot easily separate times. Just average.
            zr = octant.roms.nc_depths(netCDF.Dataset(loc), 'rho').get_station_depths().mean(axis=0)[s_rho, ibuoy]
            df = df.reset_index(level=0).set_index('ocean_time')

        # adjustments
        df['s_rho'] = np.tile(zr, int(len(df)/zr.size))
        df.rename(columns={'s_rho': 'Depth [m]'}, inplace=True)
        df.index.rename('Dates [UTC]', inplace=True)
        df.drop(['lon_rho', 'lat_rho', 's_w'], axis=1, inplace=True, errors='ignore')
        df.rename(columns={var: varname for var, varname in zip(vars, varnames)}, inplace=True)
        df['Density [kg/m^3]'] = gsw.rho(df['Salinity'], df['WaterT [deg C]'], np.zeros(len(df)))
        if s_rho in [-1, 29]:
            df['RelH [%]'] *= 100

        # un-rotate velocities, then re-rotate to match TABS website angles;
        # also convert to cm/s
        df['Along [cm/s]'] *= 100
        df['Across [cm/s]'] *= 100
        # rotate from curvilinear to cartesian
        anglev = ds['angle'][ibuoy]  # using at least nearby grid rotation angle
        # Project along- and across-shelf velocity rather than use from model
        # so that the angle matches the buoy.
        df['East [cm/s]'], df['North [cm/s]'] = tools.rot2d(
            df['Along [cm/s]'], df['Across [cm/s]'], anglev)  # approximately to east, north
        theta = np.deg2rad(-(bys[buoy]['angle'] - 90))  # convert from compass to math angle
        if ~np.isnan(theta):
            df['Across [cm/s]'] = df['East [cm/s]']*np.cos(-theta) - df['North [cm/s]']*np.sin(-theta)
            df['Along [cm/s]'] = df['East [cm/s]']*np.sin(-theta) + df['North [cm/s]']*np.cos(-theta)

        return df
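# A minimal usage sketch for read_model (illustrative only). It needs network
# access to the THREDDS servers listed above and the module-level imports
# (xr, pd, np, gsw, octant, netCDF, bp, tools, bys, logging); the buoy name is
# an example. Timezone-aware timestamps are used because the requested bounds
# are compared against UTC timestamps inside the function.
import pandas as pd

dend = pd.Timestamp.now(tz='utc')
dstart = dend - pd.Timedelta(days=7)
df = read_model('B', 'ven', dstart, dend, timing='recent', s_rho=-1)
if df is not None:
    print(df[['Salinity', 'WaterT [deg C]', 'Density [kg/m^3]']].head())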
def mld(SA, CT, p, criterion='pdvar'):
    """
    Compute the mixed layer depth.

    Parameters
    ----------
    SA : array_like
        Absolute Salinity [g/kg]
    CT : array_like
        Conservative Temperature [:math:`^\circ` C (ITS-90)]
    p : array_like
        sea pressure [dbar]
    criterion : str, optional
        MLD criterion

    Mixed layer depth criteria are:

    'temperature' : computed based on the constant temperature difference
    criterion, CT(0) - T[mld] = 0.5 degree C.

    'density' : computed based on the constant potential density difference
    criterion, pd[0] - pd[mld] = 0.125 in sigma units.

    'pdvar' : computed based on the variable potential density criterion,
    pd[0] - pd[mld] = var(T[0], S[0]), where var is a variable potential
    density difference which corresponds to a constant temperature difference
    of 0.5 degree C.

    Returns
    -------
    MLD : array_like
        Mixed layer depth
    idx_mld : bool array
        Boolean array in the shape of p with the MLD index.

    Examples
    --------
    >>> import os
    >>> import gsw
    >>> import matplotlib.pyplot as plt
    >>> from oceans import mld
    >>> from gsw.utilities import Bunch
    >>> # Read data file with check value profiles
    >>> datadir = os.path.join(os.path.dirname(gsw.utilities.__file__), 'data')
    >>> cv = Bunch(np.load(os.path.join(datadir, 'gsw_cv_v3_0.npz')))
    >>> SA, CT, p = (cv.SA_chck_cast[:, 0], cv.CT_chck_cast[:, 0],
    ...              cv.p_chck_cast[:, 0])
    >>> fig, (ax0, ax1, ax2) = plt.subplots(nrows=1, ncols=3, sharey=True)
    >>> l0 = ax0.plot(CT, -p, 'b.-')
    >>> MDL, idx = mld(SA, CT, p, criterion='temperature')
    >>> l1 = ax0.plot(CT[idx], -p[idx], 'ro')
    >>> l2 = ax1.plot(CT, -p, 'b.-')
    >>> MDL, idx = mld(SA, CT, p, criterion='density')
    >>> l3 = ax1.plot(CT[idx], -p[idx], 'ro')
    >>> l4 = ax2.plot(CT, -p, 'b.-')
    >>> MDL, idx = mld(SA, CT, p, criterion='pdvar')
    >>> l5 = ax2.plot(CT[idx], -p[idx], 'ro')
    >>> _ = ax2.set_ylim(-500, 0)

    References
    ----------
    .. [1] Monterey, G., and S. Levitus, 1997: Seasonal variability of mixed
       layer depth for the World Ocean. NOAA Atlas, NESDIS 14, 100 pp.
       Washington, D.C.

    """
    SA, CT, p = list(map(np.asanyarray, (SA, CT, p)))
    SA, CT, p = np.broadcast_arrays(SA, CT, p)
    SA, CT, p = list(map(ma.masked_invalid, (SA, CT, p)))

    p_min, idx = p.min(), p.argmin()

    sigma = gsw.rho(SA, CT, p_min) - 1000.

    # Temperature, salinity, and density anomaly at the surface.
    T0, S0, Sig0 = CT[idx], SA[idx], sigma[idx]

    # NOTE: the temperature difference criterion for MLD.
    Tdiff = T0 - 0.5  # 0.8 in the MATLAB original.

    if criterion == 'temperature':
        idx_mld = (CT > Tdiff)
    elif criterion == 'pdvar':
        pdvar_diff = gsw.rho(S0, Tdiff, p_min) - 1000.
        idx_mld = (sigma <= pdvar_diff)
    elif criterion == 'density':
        sig_diff = Sig0 + 0.125
        idx_mld = (sigma <= sig_diff)
    else:
        raise NameError("Unknown criterion %s" % criterion)

    MLD = ma.masked_all_like(p)
    MLD[idx_mld] = p[idx_mld]

    return MLD.max(axis=0), idx_mld
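# A small self-contained check of mld() on a synthetic two-layer profile,
# assuming the mld function above and its module-level numpy (np), numpy.ma
# (ma), and gsw imports are in scope; the profile values are made up.
import numpy as np

p = np.arange(0., 200., 2.)            # pressure [dbar]
CT = np.where(p < 30, 20.0, 12.0)      # Conservative Temperature [deg C]
SA = np.where(p < 30, 35.0, 35.5)      # Absolute Salinity [g/kg]

MLD, idx = mld(SA, CT, p, criterion='pdvar')
print(MLD)                             # expect roughly the 30 dbar layer depth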
# Fragment from a larger script: d is an open ROMS output netCDF file, and
# tind, years, months, days, hours, g, rho0, and h are defined upstream.
fname = (
    "calcs/slope-burger/"
    + "N-"
    + str(years[tind])
    + "-"
    + str(months[tind]).zfill(2)
    + "-"
    + str(days[tind]).zfill(2)
    + "T"
    + str(hours[tind]).zfill(2)
    + ".npz"
)
if not os.path.exists(fname):
    salt = d.variables["salt"][tind, :, :, :]
    temp = d.variables["temp"][tind, :, :, :]
    rho = gsw.rho(salt, temp, 0)
    zeta = d.variables["zeta"][tind, :, :]
    zwt = octant.depths.get_zw(
        d.variables["Vtransform"][:][0],
        d.variables["Vstretching"][:][0],
        salt.shape[0],
        d.variables["theta_s"][:][0],
        d.variables["theta_b"][:][0],
        h,
        d.variables["hc"][:][0],
        zeta=zeta,
        Hscale=3,
    )
    # Median buoyancy frequency over depth from a centered density difference.
    Ntemp = np.ma.median(
        np.sqrt(-g / rho0 * ((rho[2:, :, :] - rho[:-2, :, :]) / (zwt[2:, :, :] - zwt[:-2, :, :]))),
        axis=0,
    )
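# For reference, the centered difference above implements the buoyancy
# frequency N = sqrt(-(g / rho0) * d(rho)/dz). A tiny self-contained sketch of
# the same calculation on a made-up 1-D profile follows; g, rho0, and the
# profile values are illustrative assumptions, not taken from the script above.
import numpy as np
import gsw

g, rho0 = 9.81, 1025.0               # gravity [m/s^2], reference density [kg/m^3]

z = np.linspace(-100.0, 0.0, 21)     # depths [m], increasing upward like zwt
temp = 15.0 + 0.05 * (z + 100.0)     # warmer near the surface
salt = np.full_like(z, 35.0)
rho = gsw.rho(salt, temp, 0)         # density referenced to the surface

# Centered difference of rho over z, mirroring the snippet above.
N = np.sqrt(-g / rho0 * (rho[2:] - rho[:-2]) / (z[2:] - z[:-2]))
print(N.mean())                      # buoyancy frequency [rad/s]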
def diagram_ts(T, S, p=0, lon=None, lat=None, use_teos10=True, is_state=True,
               dsigma=1., result='default', debug=False, **kwargs):
    """Plots a T-S diagram.

    Under TEOS-10, observed values of practical salinity and in situ
    temperature t need to be converted into absolute salinity and conservative
    temperature.

    Parameters
    ----------
    T : array like
        In situ or conservative temperature [degC].
    S : array like
        Practical or absolute salinity [unitless or g kg-1], according to
        `use_teos10`.
    p : array like
        Pressure [dbar]. If not given, assumes sea surface.
    lon, lat : float, array like
        To plot the state diagram, longitude and latitude have to be given in
        decimal degrees.
    use_teos10 : boolean, optional
        If true (default), uses conservative temperature and absolute salinity
        according to the Thermodynamic Equation of SeaWater 2010 (TEOS-10).
        If longitude and latitude are not given, assumes that T and S have
        already been converted according to TEOS-10.
    is_state : boolean, optional
        If true (default), plots the state diagram: density anomalies
        referenced to the surface.
    dsigma : float, optional
        Sets the interval for each isopycnal in the state diagram. Default
        is 1.
    result : string, optional
        If `default`, returns axis and handle objects. If `results`, also
        returns the converted absolute salinity and conservative temperature.
    debug : boolean, optional
        If true, prints some statistics on screen.

    Returns
    -------
    ax : axis
    hs : handles
    [SA, CT] : array_like

    """
    keys = kwargs.keys()
    if 'format' not in keys:
        kwargs['format'] = '.'
    if 'zorder' not in keys:
        kwargs['zorder'] = 99
    kwargs['return_handles'] = True

    # Calculates absolute salinity and conservative temperature.
    if use_teos10 and (lon is not None) and (lat is not None):
        SA = gsw.SA_from_SP(S, p, lon, lat)
        CT = gsw.CT_from_t(SA, T, p)
    else:
        SA = S
        CT = T

    if debug:
        dump = ['Mean differences', '----------------']
        dT, dS = T - CT, S - SA
        dump.append('Temperature: {:.4f} ± {:.4f}'.format(dT.mean(), dT.std()))
        dump.append('Salinity: {:.4f} ± {:.4f}'.format(dS.mean(), dS.std()))
        dump.append('')
        print('\n'.join(dump))

    # Plots Theta - SA diagram.
    ax, hs = plot(SA, CT, **kwargs)

    # Calculates surface-referenced density anomalies from absolute salinity
    # (SA) and conservative temperature using the `gsw` module and contours them.
    if is_state:
        # if (lon == None) | (lat == None):
        #     raise ValueError('Missing longitude and latitude.')
        SA_lim = ax.get_xlim()
        CT_lim = ax.get_ylim()
        SA_range = numpy.linspace(SA_lim[0], SA_lim[1], 100)
        CT_range = numpy.linspace(CT_lim[0], CT_lim[1], 100)
        sigma_range = numpy.arange(0, 50.5, dsigma)
        SA_grid, CT_grid = numpy.meshgrid(SA_range, CT_range)
        sigma_grid = gsw.rho(SA_grid, CT_grid, 0) - 1000
        cs = ax.contour(SA_grid, CT_grid, sigma_grid, sigma_range, colors='k',
                        alpha=0.5, zorder=-98)
        cs.clabel(colors='k', alpha=0.5, fmt='%1.1f')

    if result == 'default':
        return ax, hs
    elif result == 'results':
        return ax, hs, SA, CT
    else:
        raise ValueError('Invalid return type `{}`'.format(result))
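# A minimal usage sketch for diagram_ts (illustrative only). It assumes the
# module-level plot() helper that diagram_ts calls, plus the gsw and numpy
# imports used above, are available; the cast values below are made up.
import numpy as np

T = np.array([25.0, 20.0, 15.0, 10.0, 5.0])    # in situ temperature [degC]
S = np.array([36.5, 36.0, 35.5, 35.0, 34.7])   # practical salinity
p = np.array([0., 50., 100., 200., 500.])      # pressure [dbar]

# lon/lat enable the TEOS-10 conversion; result='results' also returns SA, CT.
ax, hs, SA, CT = diagram_ts(T, S, p=p, lon=-38.0, lat=15.0, result='results')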