def correct_ts(datadir, filename, corr_coeff_filename):
    """Correct TB temperature and salinity using calibration coefficients.

    Applies a linear correction (a*x + b) read from ``corr_coeff_filename``
    to the TEMP and PSAL variables of every ``TB_20181211*`` netCDF file in
    ``datadir`` that carries lon/lat, derives absolute salinity (open-ocean
    and Baltic equations) and potential temperature, and writes the augmented
    dataset to 'Data/ctd_files/gridded_calibrated_updated'.

    Parameters
    ----------
    datadir : str
        Directory containing the gridded netCDF files.
    filename : str
        Unused; kept for backward compatibility with existing callers.
    corr_coeff_filename : str
        Whitespace-delimited table with columns a_temp, b_temp, a_sal, b_sal.
    """
    # sep=r'\s+' is the documented equivalent of the deprecated
    # delim_whitespace=True and works on both old and new pandas.
    coeffs = pd.read_csv(corr_coeff_filename, sep=r'\s+')
    outdir = 'Data/ctd_files/gridded_calibrated_updated'
    # BUG FIX: the loop previously rebound the ``filename`` parameter;
    # use a distinct loop variable so the argument is no longer shadowed.
    for fname in sorted(os.listdir(datadir)):
        if fname[:11] != 'TB_20181211':
            continue
        data = xr.open_dataset(os.path.join(datadir, fname))
        # Only files carrying coordinates can be converted to absolute salinity.
        if 'lon' not in data.data_vars:
            continue
        print(fname)
        data['t_corrected'] = ('DEPTH',
                               coeffs.a_temp.values * data.TEMP + coeffs.b_temp.values)
        data['s_corrected'] = ('DEPTH',
                               coeffs.a_sal.values * data.PSAL + coeffs.b_sal.values)
        data['ab_sal'] = ('DEPTH',
                          gsw.SA_from_SP(data.s_corrected, data.DEPTH,
                                         data.lon.values, data.lat.values))
        # NOTE(review): gsw.pt_from_CT expects Conservative Temperature but is
        # fed in-situ TEMP here (and not the corrected ``t_corrected`` computed
        # above, which is never used). Kept as-is to preserve output values —
        # confirm intent with the data owner.
        data['ptemp'] = ('DEPTH', gsw.pt_from_CT(data.ab_sal, data.TEMP))
        data['ab_sal_bal'] = ('DEPTH',
                              gsw.SA_from_SP_Baltic(data.s_corrected,
                                                    data.lon.values,
                                                    data.lat.values))
        data['ptemp_bal'] = ('DEPTH', gsw.pt_from_CT(data.ab_sal_bal, data.TEMP))
        data.to_netcdf(os.path.join(outdir, fname))
def gsw_rho(C, T, P, lon, lat):
    """Return in-situ density from conductivity, temperature, and pressure.

    The absolute-salinity step uses the Baltic Sea equation because this
    particular data set was collected in the Baltic.
    """
    practical_salinity = gsw.SP_from_C(C, T, P)
    absolute_salinity = gsw.SA_from_SP_Baltic(practical_salinity, lon, lat)
    # In-situ density from absolute salinity and in-situ temperature.
    return gsw.density.rho_t_exact(absolute_salinity, T, P)
def _compute_data(self, data, units, names, p_ref=0, baltic=False, lon=0, lat=0, isen='0'):
    """Compute conservative temperature, absolute salinity and potential density.

    Expects a recarray with the following entries:
        data['C' + isen]: conductivity in mS/cm (S/m is auto-converted),
        data['T' + isen]: in-situ temperature in degree Celsius (ITS-90;
                          IPTS-68 is auto-converted),
        data['p']: in-situ sea pressure in dbar.

    Arguments:
        p_ref: Reference pressure for potential density.
        baltic: If True use the Baltic Sea salinity equation instead of
            the open-ocean one.
        lon: Longitude of ctd cast, default=0.
        lat: Latitude of ctd cast, default=0.
        isen: Sensor index suffix selecting the input columns.

    Returns:
        list [cdata, cunits, cnames] with
        cdata: dict with entries 'SP', 'SA', 'pot_rho', 'pt', 'CT'
               (each suffixed with the doubled sensor index),
        cunits: dict with units,
        cnames: dict with names.
    """
    # NOTE(review): the output keys double the sensor suffix (e.g. '00' for
    # isen='0'); presumably intentional — confirm against downstream readers.
    sen = isen + isen

    # Unit conversion for conductivity: gsw.SP_from_C expects mS/cm.
    Cfac = 1.0
    if units['C' + isen] == 'S/m':
        logger.info('Converting conductivity units from S/m to mS/cm')
        # BUG FIX: this factor was computed but never applied to the data.
        Cfac = 10.0
    # Unit conversion for temperature: gsw expects ITS-90.
    if ('68' in units['T' + isen]) or ('68' in names['T' + isen]):
        logger.info('Converting IPTS-68 to T90')
        T = gsw.t90_from_t68(data['T' + isen])
    else:
        T = data['T' + isen]

    SP = gsw.SP_from_C(data['C' + isen] * Cfac, T, data['p'])
    SA = gsw.SA_from_SP(SP, data['p'], lon=lon, lat=lat)
    if baltic:
        # BUG FIX: the Baltic equation takes practical salinity (SP), not the
        # open-ocean absolute salinity computed above.
        SA = gsw.SA_from_SP_Baltic(SP, lon=lon, lat=lat)
    PT = gsw.pt0_from_t(SA, T, data['p'])
    CT = gsw.CT_from_t(SA, T, data['p'])
    pot_rho = gsw.pot_rho_t_exact(SA, T, data['p'], p_ref)

    # (Removed an unused local list that shadowed the ``names`` parameter.)
    cdata = {
        'SP' + sen: SP,
        'SA' + sen: SA,
        'pot_rho' + sen: pot_rho,
        'pt' + sen: PT,
        'CT' + sen: CT,
    }
    cnames = {
        'SA' + sen: 'Absolute salinity',
        'SP' + sen: 'Practical Salinity on the PSS-78 scale',
        'pot_rho' + sen: 'Potential density',
        'pt' + sen: 'potential temperature with reference sea pressure (p_ref) = 0 dbar',
        'CT' + sen: 'Conservative Temperature (ITS-90)',
    }
    cunits = {
        'SA' + sen: 'g/kg',
        'SP' + sen: 'PSU',
        'pot_rho' + sen: 'kg/m^3',
        'CT' + sen: 'deg C',
        'pt' + sen: 'deg C',
    }
    return [cdata, cunits, cnames]
def calc_teos10_columns(self, lat, lng):
    """Derive TEOS-10 columns from the raw Cond/Temp/Pres data.

    Returns a dict with practical salinity, absolute salinity (Baltic
    equation), conservative temperature, sigma0 density anomaly, and depth.
    """
    cond = self.data['Cond']
    temp = self.data['Temp']
    pres = self.data['Pres']

    sp = gsw.SP_from_C(cond, temp, pres)        # Practical Salinity
    sa = gsw.SA_from_SP_Baltic(sp, lng, lat)    # Absolute Salinity (Baltic)
    ct = gsw.CT_from_t(sa, temp, pres)          # Conservative Temperature
    sigma = gsw.sigma0(sa, ct)                  # density, reference pressure 0 dbar
    # z_from_p is negative below the surface; report positive-down depth.
    depth = [abs(z) for z in gsw.z_from_p(pres, lat)]

    return {
        'PracticalSalinity': sp,
        'AbsoluteSalinity': sa,
        'ConservativeTemperature': ct,
        'Sigma(density)': sigma,
        'Depth': depth,
    }
def convert_ts(datadir, filename):
    """Derive absolute salinity and potential temperature for SK casts.

    For every ``SK_20181210*`` netCDF file in ``datadir``, converts practical
    salinity to absolute salinity (open-ocean and Baltic equations) and adds
    potential-temperature variables, then writes the augmented dataset to
    'Data/ctd_files/gridded_calibrated_updated'.

    Parameters
    ----------
    datadir : str
        Directory containing the gridded netCDF files.
    filename : str
        Unused; kept for backward compatibility with existing callers.
    """
    outdir = 'Data/ctd_files/gridded_calibrated_updated'
    # BUG FIX: the loop previously rebound the ``filename`` parameter;
    # use a distinct loop variable so the argument is no longer shadowed.
    for fname in sorted(os.listdir(datadir)):
        if fname[:11] != 'SK_20181210':
            continue
        data = xr.open_dataset(os.path.join(datadir, fname))
        print(fname)
        data['ab_sal'] = ('DEPTH',
                          gsw.SA_from_SP(data.PSAL, data.DEPTH,
                                         data.lon.values, data.lat.values))
        # NOTE(review): gsw.pt_from_CT expects Conservative Temperature but is
        # fed in-situ TEMP here; kept as-is to preserve output — confirm intent.
        data['ptemp'] = ('DEPTH', gsw.pt_from_CT(data.ab_sal, data.TEMP))
        data['ab_sal_bal'] = ('DEPTH',
                              gsw.SA_from_SP_Baltic(data.PSAL,
                                                    data.lon.values,
                                                    data.lat.values))
        data['ptemp_bal'] = ('DEPTH', gsw.pt_from_CT(data.ab_sal_bal, data.TEMP))
        data.to_netcdf(os.path.join(outdir, fname))
def SA_from_C(C, t, p, lon, lat):
    """Convert conductivity straight to absolute salinity (Baltic equation)."""
    practical = gsw.conversions.SP_from_C(C, t, p)
    absolute = gsw.SA_from_SP_Baltic(practical, lon, lat)
    return absolute
#### ---------- MSS cast --------- #### # CTD casts before storm (pd is powerful!)) lat = 55 lon = 16 Pbin = np.arange(0.5, 85, 1) MSS_S1_1_dict = loadmat('./data/MSS_DATA/S1_1.mat', squeeze_me=True, struct_as_record=False) Zmss = MSS_S1_1_dict['CTD'][2].P Tmss = MSS_S1_1_dict['CTD'][2].T Smss = MSS_S1_1_dict['CTD'][2].S digitized = np.digitize(Zmss, Pbin) #<- this is awesome! TTbin = np.array([Tmss[digitized == i].mean() for i in range(0, len(Pbin))]) SSbin = np.array([Smss[digitized == i].mean() for i in range(0, len(Pbin))]) SA_MSS_01 = gsw.SA_from_SP_Baltic(SSbin, lon, lat) CT_MSS_01 = gsw.CT_from_t(SA_MSS_01, TTbin, Pbin) SIG0_MSS_01 = gsw.sigma0(SA_MSS_01, CT_MSS_01) N2_MSS_01 = gsw.Nsquared(SA_MSS_01, CT_MSS_01, Pbin, lat) import SW_extras as swe N2_01 = N2_MSS_01[0] zN2_01 = N2_MSS_01[1] N2_period_01 = 60.0 / swe.cph(N2_01) MSS_S1_2_dict = loadmat('./data/MSS_DATA/S1_2.mat', squeeze_me=True, struct_as_record=False) Zmss = MSS_S1_2_dict['CTD'][2].P Tmss = MSS_S1_2_dict['CTD'][2].T Smss = MSS_S1_2_dict['CTD'][2].S digitized = np.digitize(Zmss, Pbin) #<- this is awesome!
def abs_suolaisuus(salt_p, lon, lat):
    """Return absolute salinity (Baltic equation) as a numpy array."""
    absolute = gsw.SA_from_SP_Baltic(salt_p, lon, lat)
    return np.asarray(absolute)