def read_gecco2_salinity(year_st, year_ed):
    """Read GECCO2 regridded salinity and build an Amazon-plume box mean.

    Parameters
    ----------
    year_st, year_ed : int
        First and last year to read (inclusive); the file starts in 1979.

    Returns
    -------
    ts_so : 1-D array (one value per month)
        Area-weighted mean salinity over the box 295-311E, 3S-15N.
    lon, lat : 1-D grid coordinate arrays (lat restricted to 30S-30N).
    mask_so : 2-D array, 1 where the time-mean salinity is valid, NaN elsewhere.
    """
    print('read GECCO2 salinity')

    year_ref = 1979                       # first year present in the file
    nt = int(year_ed - year_st + 1) * 12  # number of months requested

    # Amazon-plume averaging box (degrees east / north).
    lon_amz1, lon_amz2 = 295, 311
    lat_amz1, lat_amz2 = -3, 15

    # Month offsets of the requested window relative to the file start.
    t0 = (year_st - year_ref) * 12
    t1 = (year_ed - year_ref) * 12 + 12

    dirname = '/stormtrack/data4/yliang/observations/GECCO2/regrided/'
    filename = 'GECCO2_S_regridded.nc'
    f = Dataset(dirname + filename, 'r')
    lat_tmp = f.variables['lat'][:].data.copy()
    lon = f.variables['lon'][:].data.copy()
    so_tmp = f.variables['salt_rg'][t0:t1, :, :].data.copy()
    f.close()

    # Keep only latitudes between 30S and 30N.
    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(0, 11, -30, 30,
                                                         lon, lat_tmp)
    lat = lat_tmp[y1:y2 + 1].copy()
    so = so_tmp[:, y1:y2 + 1, :].copy()

    # x/x -> 1 where the time-mean is valid, NaN over missing cells.
    mask_so = np.nanmean(so, axis=0).copy() / np.nanmean(so, axis=0).copy()

    # Grid-cell areas; blank out cells that are missing in the data.
    area_so = data_process_f.area_calculate_nonuniform(lon, lat)
    area_so[np.isnan(so[1, :, :]) == 1] = np.nan

    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(
        lon_amz1, lon_amz2, lat_amz1, lat_amz2, lon, lat)

    # Area-weighted mean over the box, month by month.
    ts_so = np.zeros((nt))
    for NT in range(nt):
        ts_so[NT] = np.nansum(
            so[NT, y1:y2 + 1, x1:x2 + 1]
            * area_so[y1:y2 + 1, x1:x2 + 1]
            * mask_so[y1:y2 + 1, x1:x2 + 1]) / np.nansum(
            area_so[y1:y2 + 1, x1:x2 + 1] * mask_so[y1:y2 + 1, x1:x2 + 1])

    return ts_so, lon, lat, mask_so
def read_smos_sss_here():
    """Read SMOS bias-adjusted SSS (2010-2016, V3) and return the monthly
    area-weighted mean over the box 65W-50W, 3S-20N.

    The latitude axis in the file is flipped so it runs south-to-north,
    and the first 7 months (partial year) are dropped.
    """
    filename = '/stormtrack/data4/yliang/observations/SMOS/SMOS_SSS_2010_2016_V3.nc'
    f = Dataset(filename, 'r')
    sss = f.variables['sss_biasadj'][7:, ::-1, :].data
    lon_sss = f.variables['lon'][:].data
    lat_sss = f.variables['lat'][::-1].data
    f.close()

    area_sss = data_process_f.area_calculate_nonuniform(lon_sss, lat_sss)
    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(-65, -50, -3, 20,
                                                         lon_sss, lat_sss)
    print(sss.shape)

    n_months = sss.shape[0]
    ts_sss_smos = np.zeros((n_months))
    for NT in range(n_months):
        box = sss[NT, y1:y2 + 1, x1:x2 + 1]
        box_area = area_sss[y1:y2 + 1, x1:x2 + 1]
        # box/box is 1 where data are valid and NaN elsewhere, so the
        # denominator counts only the area of valid cells for this month.
        ts_sss_smos[NT] = np.nansum(box * box_area) / np.nansum(
            box_area * box / box)

    return ts_sss_smos
def read_oras4_salinity(year_st, year_ed, mask_sss, lon_sss, lat_sss):
    """ORAS4 surface salinity time series over the Amazon-plume box.

    The valid-data mask ``mask_sss`` (defined on the lon_sss/lat_sss grid)
    is regridded onto the ORAS4 grid with nearest-neighbour interpolation
    and used to weight the box average.

    Returns a 1-D monthly time series covering year_st..year_ed.
    """
    [so_oras, lon_oras, lat_oras, depth_oras] = ORAS4_data_process_f.read_variable(
        0, 360, -30, 30, 0, year_st, year_ed, 'so')
    print(depth_oras)

    # Nearest-neighbour regrid of the mask onto the ORAS4 grid.
    [grid_x, grid_y] = np.meshgrid(lon_sss, lat_sss)
    [lon_x, lat_y] = np.meshgrid(lon_oras, lat_oras)
    mask_rg = griddata((grid_y.ravel(), grid_x.ravel()), mask_sss.ravel(),
                       (lat_y, lon_x), method='nearest')

    # Amazon-plume averaging box.
    lon_amz1, lon_amz2 = 295, 311
    lat_amz1, lat_amz2 = -3, 15

    area_so = data_process_f.area_calculate_nonuniform(lon_oras, lat_oras)
    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(
        lon_amz1, lon_amz2, lat_amz1, lat_amz2, lon_oras, lat_oras)

    n_months = so_oras.shape[0]
    ts_so = np.zeros((n_months))
    for NT in range(n_months):
        ts_so[NT] = np.nansum(
            so_oras[NT, y1:y2 + 1, x1:x2 + 1]
            * area_so[y1:y2 + 1, x1:x2 + 1]
            * mask_rg[y1:y2 + 1, x1:x2 + 1]) / np.nansum(
            area_so[y1:y2 + 1, x1:x2 + 1] * mask_rg[y1:y2 + 1, x1:x2 + 1])
    return ts_so
def read_en4_salinity(year_st, year_ed, mask_sss, lon_sss, lat_sss):
    """EN4.2.1 top-level salinity averaged over the Amazon-plume box.

    The valid-data mask ``mask_sss`` (on the lon_sss/lat_sss grid) is
    regridded onto the EN4 grid by nearest-neighbour interpolation and
    used to weight the box average.

    Returns a 1-D monthly time series covering year_st..year_ed.
    """
    print('EN4 salinity')

    year_ref = 1979  # file covers 197901-201812
    t0 = (year_st - year_ref) * 12
    t1 = (year_ed - year_ref) * 12 + 12

    filename = '/stormtrack/data4/yliang/observations/EN4/EN4_2_1/EN.4.2.1.f.analysis.g10.197901_201812.nc'
    f = Dataset(filename, 'r')
    lon_en4 = f.variables['lon'][:].data
    lat_en4 = f.variables['lat'][:].data
    sss = f.variables['salinity'][t0:t1, 0, :, :].data  # level 0 = surface
    f.close()

    # Nearest-neighbour regrid of the mask onto the EN4 grid.
    [grid_x, grid_y] = np.meshgrid(lon_sss, lat_sss)
    [lon_x, lat_y] = np.meshgrid(lon_en4, lat_en4)
    mask_rg = griddata((grid_y.ravel(), grid_x.ravel()), mask_sss.ravel(),
                       (lat_y, lon_x), method='nearest')

    # Amazon-plume averaging box.
    lon_amz1, lon_amz2 = 295, 311
    lat_amz1, lat_amz2 = -3, 15

    area_so = data_process_f.area_calculate_nonuniform(lon_en4, lat_en4)
    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(
        lon_amz1, lon_amz2, lat_amz1, lat_amz2, lon_en4, lat_en4)

    ts_so = np.zeros((sss.shape[0]))
    for NT in range(len(ts_so)):
        ts_so[NT] = np.nansum(
            sss[NT, y1:y2 + 1, x1:x2 + 1]
            * area_so[y1:y2 + 1, x1:x2 + 1]
            * mask_rg[y1:y2 + 1, x1:x2 + 1]) / np.nansum(
            area_so[y1:y2 + 1, x1:x2 + 1] * mask_rg[y1:y2 + 1, x1:x2 + 1])
    return ts_so
def read_aquarius_salinity():
    """Aquarius CAP v5.0 SSS averaged over the Amazon-plume box.

    Reads 36 monthly files (2012-01 .. 2014-12), masks large negative
    fill values, and returns the area-weighted mean over 295-311E, 3S-15N.

    Returns
    -------
    ts_so : 1-D array of length 36 (monthly time series).
    """
    dirname = '/stormtrack/data4/yliang/observations/Aquarius/'

    # Read the grid axes once from a sample file.
    filename = 'sss201304.v5.0cap.nc'
    f = Dataset(dirname + filename, 'r')
    lon_sss = f.variables['lon'][:].data
    lat_sss = f.variables['lat'][:].data
    f.close()

    sss_tmp = np.zeros((36, len(lat_sss), len(lon_sss)))
    NN = 0
    for NY in range(3):
        for NM in range(12):
            filename = 'sss' + str(2012 + NY) + str(NM + 1).zfill(2) + '.v5.0cap.nc'
            f = Dataset(dirname + filename, 'r')
            sss_tmp[NN, :, :] = f.variables['sss_cap'][:, :].data.squeeze()
            # BUG FIX: the dataset was never closed inside this loop,
            # leaking one file handle per month read.
            f.close()
            NN = NN + 1

    # Fill values are large negative numbers; convert them to NaN.
    sss_tmp[sss_tmp < -100] = np.nan

    # x/x -> 1 where the time-mean is valid, NaN elsewhere.
    mask_rg = np.nanmean(sss_tmp, axis=0).copy() / np.nanmean(sss_tmp, axis=0).copy()

    # Amazon-plume averaging box.
    lon_amz1, lon_amz2 = 295, 311
    lat_amz1, lat_amz2 = -3, 15

    area_so = data_process_f.area_calculate_nonuniform(lon_sss, lat_sss)
    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(
        lon_amz1, lon_amz2, lat_amz1, lat_amz2, lon_sss, lat_sss)

    ts_so = np.zeros((sss_tmp.shape[0]))
    for NT in range(len(ts_so)):
        ts_so[NT] = np.nansum(
            sss_tmp[NT, y1:y2 + 1, x1:x2 + 1]
            * area_so[y1:y2 + 1, x1:x2 + 1]
            * mask_rg[y1:y2 + 1, x1:x2 + 1]) / np.nansum(
            area_so[y1:y2 + 1, x1:x2 + 1] * mask_rg[y1:y2 + 1, x1:x2 + 1])
    return ts_so
def read_gecco2_salinity(year_st, year_ed):
    """Read GECCO2 regridded salinity; return the Amazon-box monthly mean
    together with a 0/1 mask of the averaging region.

    Returns
    -------
    ts_so : 1-D array (months)
        Area-weighted mean over 295-310E, 3S-15N.
    lat, lon : 1-D grid coordinate arrays.
    so_mask : 2-D array
        1 over ocean cells inside the averaging box, 0 everywhere else.
    """
    print('read GECCO2 salinity')

    year_ref = 1979                       # first year present in the file
    nt = int(year_ed - year_st + 1) * 12  # number of months requested

    # Averaging box (NOTE: upper lon is 310 here, unlike the 311 used by
    # the other readers in this project).
    lon_amz1, lon_amz2 = 295, 310
    lat_amz1, lat_amz2 = -3, 15

    # Month offsets of the requested window relative to the file start.
    t0 = (year_st - year_ref) * 12
    t1 = (year_ed - year_ref) * 12 + 12

    dirname = '/stormtrack/data4/yliang/observations/GECCO2/regrided/'
    filename = 'GECCO2_S_regridded.nc'
    f = Dataset(dirname + filename, 'r')
    lat = f.variables['lat'][:].data.copy()
    lon = f.variables['lon'][:].data.copy()
    so = f.variables['salt_rg'][t0:t1, :, :].data.copy()
    f.close()

    # Sample month 111: 0*so is 0 over valid ocean, NaN over missing cells.
    so_mask = so[111, :, :].copy() * 0.
    so_mask_tmp = so_mask.copy()
    so_mask_tmp[np.isnan(so_mask_tmp) == False] = 1  # 1 over ocean, NaN over land

    # Grid-cell areas; blank out cells that are missing in the data.
    area_so = data_process_f.area_calculate_nonuniform(lon, lat)
    area_so[np.isnan(so[1, :, :]) == 1] = np.nan

    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(
        lon_amz1, lon_amz2, lat_amz1, lat_amz2, lon, lat)

    # Area-weighted mean over the box, month by month.
    ts_so = np.zeros((nt))
    for NT in range(nt):
        ts_so[NT] = np.nansum(
            so[NT, y1:y2 + 1, x1:x2 + 1]
            * area_so[y1:y2 + 1, x1:x2 + 1]) / np.nansum(
            area_so[y1:y2 + 1, x1:x2 + 1])

    # Paint the box into the mask, restrict it to ocean, then map NaN -> 0.
    # NOTE(review): the y1-1 start looks like a possible off-by-one relative
    # to the y1:y2+1 slices used above — confirm it is intentional.
    so_mask[y1 - 1:y2 + 1, x1:x2 + 1] = 1.
    so_mask = so_mask_tmp * so_mask
    so_mask[np.isnan(so_mask) == True] = 0.

    return ts_so, lat, lon, so_mask
def sss_calculate_here(TLONG, TLAT, sss_test):
    """Regrid model SSS to a regular global grid, average over the
    Amazon-plume box, smooth with an N-month running mean, and return the
    linear trend of the annual seasonal range (max - min).

    Parameters
    ----------
    TLONG, TLAT : 2-D curvilinear model grid coordinates.
    sss_test : 3-D array (time, y, x) of salinity on the model grid;
        the time length must be a whole number of years.

    Returns
    -------
    (sss_test_trend, sss_test_sig, pvalue) from the regression helper.

    Notes
    -----
    Depends on module-level names: N (running-mean window), sig_level,
    max_min_select, statistical_f, data_process_f.
    """
    # --- nearest-neighbour regrid onto a regular grid --------------------
    lon = np.linspace(0, 360, 360)
    lat = np.linspace(-79, 89, 180)
    [gridx_out, gridy_out] = np.meshgrid(lon, lat)
    ny = 180
    nx = 360
    [gridx_in, gridy_in] = [TLONG, TLAT]
    nt = sss_test.shape[0]
    sss_test_interp = np.zeros((nt, ny, nx))
    for NT in range(nt):
        print('Ocean Salinity')
        print(NT)
        sss_test_interp[NT, :, :] = griddata(
            (gridy_in.ravel(), gridx_in.ravel()),
            np.squeeze(sss_test[NT, :, :]).ravel(),
            (gridy_out, gridx_out), method='nearest')

    # --- Amazon-plume box average ----------------------------------------
    lon_amz1, lon_amz2 = 295, 311
    lat_amz1, lat_amz2 = -3, 15
    # x/x -> 1 where valid, NaN elsewhere (sampled at month index 11).
    mask_sss = (sss_test_interp[11, :, :] / sss_test_interp[11, :, :]).copy()
    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(
        lon_amz1, lon_amz2, lat_amz1, lat_amz2, lon, lat)
    area_so = data_process_f.area_calculate_nonuniform(lon, lat)
    ts_sss_test = np.zeros((nt))
    for NT in range(nt):
        # BUG FIX: the validity mask now weights the numerator as well as
        # the denominator — previously it appeared only in the denominator,
        # inconsistent with the companion sss_calculate_here variant.
        ts_sss_test[NT] = np.nansum(
            sss_test_interp[NT, y1:y2 + 1, x1:x2 + 1]
            * area_so[y1:y2 + 1, x1:x2 + 1]
            * mask_sss[y1:y2 + 1, x1:x2 + 1]) / np.nansum(
            area_so[y1:y2 + 1, x1:x2 + 1] * mask_sss[y1:y2 + 1, x1:x2 + 1])

    # --- N-month running mean (window N from module scope) ----------------
    ts_rmean = ts_sss_test.copy()
    ts_rmean[int((N - 1) / 2):-int((N - 1) / 2)] = np.convolve(
        ts_sss_test.copy(), np.ones((N, )) / N, mode='valid')

    # --- trend of the annual (max - min) range -----------------------------
    n_years = nt // 12
    sss_monthly = ts_rmean.reshape((n_years, 12))
    [sss_max_test, sss_min_test] = max_min_select(sss_monthly, n_years)
    # BUG FIX: np.linspace requires an integer sample count; nt / 12 is a
    # float in Python 3 and raises TypeError on modern NumPy.
    [sss_test_trend, sss_test_sig, pvalue
     ] = statistical_f.linear_regression_1D_t_test_with_sample_size_adjusted(
        np.linspace(1, n_years, n_years)[0:-2],
        sss_max_test[1:-1] - sss_min_test[1:-1], sig_level)

    return sss_test_trend, sss_test_sig, pvalue
def read_ecco_salinity(year_st, year_ed, mask_sss, lon_sss, lat_sss):
    """ECCO v4r4 surface salinity averaged over the NW tropical Atlantic box.

    Reads monthly SALT files from 1992 through year_ed and returns the
    area-weighted mean over 65W-49W, 3S-15N. The mask_sss / lon_sss /
    lat_sss arguments are currently unused (kept so the signature matches
    the other salinity readers).
    """
    year_ref = 1992  # ECCO record starts in 1992
    yearN = int(year_ed - year_ref + 1)

    # 1-D grid axes extracted from a sample file's 2-D coordinate arrays.
    filename = '/stormtrack/data4/yliang/observations/ECCO/SALT.2011.nc'
    f = Dataset(filename, 'r')
    lon_tmp = f.variables['lon'][:, :].data
    lat_tmp = f.variables['lat'][:, :].data
    f.close()
    lon_ecco = lon_tmp[1, :].copy()
    lat_ecco = lat_tmp[:, 1].copy()

    ny = len(lat_ecco)
    nx = len(lon_ecco)
    sss = np.zeros((yearN * 12, ny, nx))
    NN = 0
    for NY in range(yearN):
        for NM in range(12):
            filename = ('/stormtrack/data4/yliang/observations/ECCO/ECCO_v4r4/SALT_'
                        + str(int(NY + year_ref)) + '_' + str(NM + 1).zfill(2) + '.nc')
            f = Dataset(filename, 'r')
            sss[NN, :, :] = f.variables['SALT'][:, 0, :, :].data.squeeze()
            f.close()
            NN = NN + 1

    # Averaging box (western-hemisphere longitudes).
    lon_amz1, lon_amz2 = -65, -49
    lat_amz1, lat_amz2 = -3, 15

    area_so = data_process_f.area_calculate_nonuniform(lon_ecco, lat_ecco)
    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(
        lon_amz1, lon_amz2, lat_amz1, lat_amz2, lon_ecco, lat_ecco)

    # x/x -> 1 where the time-mean is valid, NaN elsewhere.
    mask_rg = np.nanmean(sss, axis=0).copy() / np.nanmean(sss, axis=0).copy()

    ts_so = np.zeros((sss.shape[0]))
    for NT in range(len(ts_so)):
        ts_so[NT] = np.nansum(
            sss[NT, y1:y2 + 1, x1:x2 + 1]
            * area_so[y1:y2 + 1, x1:x2 + 1]
            * mask_rg[y1:y2 + 1, x1:x2 + 1]) / np.nansum(
            area_so[y1:y2 + 1, x1:x2 + 1] * mask_rg[y1:y2 + 1, x1:x2 + 1])
    return ts_so
def sss_calculate_here(TLONG, TLAT, sss_test):
    """Nearest-neighbour regrid of model SSS onto a regular global grid,
    then area-weighted average over the Amazon-plume box (295-311E, 3S-15N).

    Parameters
    ----------
    TLONG, TLAT : 2-D curvilinear model grid coordinates.
    sss_test : 3-D array (time, y, x) of salinity on the model grid.

    Returns
    -------
    ts_sss_test : 1-D monthly box-mean time series.
    """
    # Target regular grid (1-degree-ish spacing).
    lon = np.linspace(0, 360, 360)
    lat = np.linspace(-79, 89, 180)
    [gridx_out, gridy_out] = np.meshgrid(lon, lat)
    ny = 180
    nx = 360
    [gridx_in, gridy_in] = [TLONG, TLAT]

    nt = sss_test.shape[0]
    sss_test_interp = np.zeros((nt, ny, nx))
    for NT in range(nt):
        print('Ocean Salinity')
        print(NT)
        sss_test_interp[NT, :, :] = griddata(
            (gridy_in.ravel(), gridx_in.ravel()),
            np.squeeze(sss_test[NT, :, :]).ravel(),
            (gridy_out, gridx_out), method='nearest')

    # Amazon-plume averaging box.
    lon_amz1, lon_amz2 = 295, 311
    lat_amz1, lat_amz2 = -3, 15

    # x/x -> 1 where valid, NaN elsewhere (sampled at month index 11).
    mask_sss = (sss_test_interp[11, :, :] / sss_test_interp[11, :, :]).copy()
    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(
        lon_amz1, lon_amz2, lat_amz1, lat_amz2, lon, lat)
    area_so = data_process_f.area_calculate_nonuniform(lon, lat)

    ts_sss_test = np.zeros((nt))
    for NT in range(nt):
        ts_sss_test[NT] = np.nansum(
            sss_test_interp[NT, y1:y2 + 1, x1:x2 + 1]
            * area_so[y1:y2 + 1, x1:x2 + 1]
            * mask_sss[y1:y2 + 1, x1:x2 + 1]) / np.nansum(
            area_so[y1:y2 + 1, x1:x2 + 1] * mask_sss[y1:y2 + 1, x1:x2 + 1])
    return ts_sss_test
filename = 'soda3.3.1_mn_ocean_reg_' + str(int(1980 + NY)) + '.nc' print(filename) f = Dataset(dirname + filename, mode='r') var_tmp = f.variables['salt'][:, 0, :, :].data.squeeze() var_tmp[abs(var_tmp) > 1.e19] = np.nan f.close() var_test[0 + NY * 12:12 + NY * 12, :, :] = var_tmp.copy() # Calculate time series lon_amz1 = 295 lon_amz2 = 311 lat_amz1 = -3 lat_amz2 = 15 [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(lon_amz1, lon_amz2, lat_amz1, lat_amz2, lon, lat) ts_test = np.zeros((year_N * 12 - 12)) for NT in range(year_N * 12 - 12): ts_test[NT] = np.nansum( var_test[NT, y1:y2 + 1, x1:x2 + 1] * area[y1:y2 + 1, x1:x2 + 1] * mask_rg[y1:y2 + 1, x1:x2 + 1]) / np.nansum( area[y1:y2 + 1, x1:x2 + 1] * mask_rg[y1:y2 + 1, x1:x2 + 1]) # Perform 3-month running average N = 3 ts_test_rmean = ts_test.copy() ts_test_rmean[int((N - 1) / 2):-int((N - 1) / 2)] = np.convolve( ts_test, np.ones((N, )) / N, mode='valid') # Arrange to monthly data
# Interpolate to precipitation grid # lon_amz1 = 295 # lon_amz2 = 311 # lat_amz1 = -3 # lat_amz2 = 15 lon_amz1 = 0 lon_amz2 = 360 lat_amz1 = -65 lat_amz2 = 65 mask_sss = sss_uncertainty[111,:,:]/sss_uncertainty[111,:,:] area_so = data_process_f.area_calculate_nonuniform(lon_en4, lat_en4) [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(lon_amz1,lon_amz2,lat_amz1,lat_amz2,lon_en4,lat_en4) ts_uncertainty = np.zeros((sss_uncertainty.shape[0])) ts_obs_weight = np.zeros((sss_obs_weight.shape[0])) for NT in range(len(ts_uncertainty)): ts_uncertainty[NT] = np.nansum(sss_uncertainty[NT,y1:y2+1,x1:x2+1]*area_so[y1:y2+1,x1:x2+1]*mask_sss[y1:y2+1,x1:x2+1])/np.nansum(area_so[y1:y2+1,x1:x2+1]*mask_sss[y1:y2+1,x1:x2+1]) ts_obs_weight[NT] = np.nansum(sss_obs_weight[NT,y1:y2+1,x1:x2+1]*area_so[y1:y2+1,x1:x2+1]*mask_sss[y1:y2+1,x1:x2+1])/np.nansum(area_so[y1:y2+1,x1:x2+1]*mask_sss[y1:y2+1,x1:x2+1]) # ================================================================ # Plot figures # ================================================================ fig = plt.figure() fig.set_size_inches(10, 10, forward=True) plt.axes([0.15, 0.55, 0.65, 0.35]) plt.plot(ttt, ts_obs_weight, 'r-', linewidth=0.7)