def rain_runoff_calculate_here(year_N, dirname, mask_rg, area_in, year_ref):
    """Build monthly rain and runoff time series from monthly NetCDF files,
    smooth them, and return seasonal-amplitude (max - min) trends.

    Parameters
    ----------
    year_N : int
        Number of years of monthly files to read (12 files per year).
    dirname : str
        Directory containing the 'P_R_YYYY-MM.nc' files.
    mask_rg : ndarray
        Region mask applied to the precipitation field.
    area_in : ndarray
        Grid-cell areas used for the area-weighted mean.
    year_ref : int
        Base year; files are read starting at year_ref + 31.

    Returns
    -------
    tuple
        (rain trend, rain significance, runoff trend, runoff significance,
         rain seasonal amplitude time series).

    Notes
    -----
    Depends on module-level names: ``Dataset`` (netCDF4), ``N`` (running-mean
    window), ``sig_level``, ``max_min_select`` and ``statistical_f``.
    """
    ts_rain_test = np.zeros((year_N * 12))
    ts_runoff_test = np.zeros((year_N * 12))

    month_idx = 0
    for yy in range(year_N):
        for mm in range(12):
            filename = 'P_R_' + str(year_ref + yy + 31) + '-' + str(
                mm + 1).zfill(2) + '.nc'
            print(filename)
            nc = Dataset(dirname + filename, mode='r')
            # Total precipitation = liquid + solid; huge fill values -> NaN.
            precip = nc.variables['RAIN'][:, :, :].data.squeeze(
            ) + nc.variables['SNOW'][:, :, :].data.squeeze()
            precip[precip > 1.e11] = np.nan
            # River discharge sampled at a single fixed grid point.
            runoff_pt = nc.variables['QCHANR'][:, 176, 248].data.squeeze()
            nc.close()
            # Area-weighted regional mean of precipitation.
            ts_rain_test[month_idx] = np.nansum(
                precip * mask_rg * area_in) / np.nansum(mask_rg * area_in)
            ts_runoff_test[month_idx] = runoff_pt
            month_idx += 1

    def _seasonal_trend(ts):
        """N-month running mean (edges keep raw values), then trend of the
        yearly max-min amplitude. Returns (trend, sig, yearly max, yearly min)."""
        half = int((N - 1) / 2)
        smoothed = ts.copy()
        smoothed[half:-half] = np.convolve(ts, np.ones((N, )) / N,
                                           mode='valid')
        monthly = smoothed.reshape((year_N, 12))
        [v_max, v_min] = max_min_select(monthly, year_N)
        [trend, sig, _pvalue
         ] = statistical_f.linear_regression_1D_t_test_with_sample_size_adjusted(
             np.linspace(1, year_N, year_N)[0:-2],
             v_max[1:-1] - v_min[1:-1], sig_level)
        return trend, sig, v_max, v_min

    rain_test_trend, rain_test_sig, rain_max_test, rain_min_test = \
        _seasonal_trend(ts_rain_test)
    runoff_test_trend, runoff_test_sig, _runoff_max, _runoff_min = \
        _seasonal_trend(ts_runoff_test)

    return rain_test_trend, rain_test_sig, runoff_test_trend, \
        runoff_test_sig, rain_max_test - rain_min_test
def sss_calculate_here(TLONG, TLAT, sss_test):
    """Regrid sea-surface salinity to a regular 1-degree grid, average it
    over a fixed Atlantic box, smooth, and return the trend of the yearly
    seasonal amplitude (max - min).

    Parameters
    ----------
    TLONG, TLAT : ndarray
        Curvilinear source-grid longitudes / latitudes.
    sss_test : ndarray
        Salinity with time as the leading axis, shape (nt, ...); nt is
        assumed to be a whole number of years (12 * n_years).

    Returns
    -------
    tuple
        (trend, significance flag, p-value) from the adjusted t-test.

    Notes
    -----
    Depends on module-level names: ``griddata`` (scipy), ``N`` (running-mean
    window), ``sig_level``, ``max_min_select``, ``data_process_f`` and
    ``statistical_f``.
    """
    # --- Regrid: nearest-neighbour interpolation onto a regular grid ---
    lon = np.linspace(0, 360, 360)
    lat = np.linspace(-79, 89, 180)
    [gridx_out, gridy_out] = np.meshgrid(lon, lat)
    ny = 180
    nx = 360
    [gridx_in, gridy_in] = [TLONG, TLAT]
    nt = sss_test.shape[0]
    sss_test_interp = np.zeros((nt, ny, nx))
    for NT in range(nt):
        print('Ocean Salinity')
        print(NT)
        sss_test_interp[NT, :, :] = griddata(
            (gridy_in.ravel(), gridx_in.ravel()),
            np.squeeze(sss_test[NT, :, :]).ravel(), (gridy_out, gridx_out),
            method='nearest')

    # --- Select region (fixed lon/lat box) ---
    lon_amz1 = 295
    lon_amz2 = 311
    lat_amz1 = -3
    lat_amz2 = 15
    # x/x mask: 1 where the field is valid at the reference time step.
    # NOTE(review): time index 11 is hard-coded as the mask reference.
    mask_sss = (sss_test_interp[11, :, :] / sss_test_interp[11, :, :]).copy()
    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(
        lon_amz1, lon_amz2, lat_amz1, lat_amz2, lon, lat)
    area_so = data_process_f.area_calculate_nonuniform(lon, lat)

    # Area-weighted box mean per time step (mask applied in the denominator).
    ts_sss_test = np.zeros((nt))
    for NT in range(nt):
        ts_sss_test[NT] = np.nansum(
            sss_test_interp[NT, y1:y2 + 1, x1:x2 + 1] *
            area_so[y1:y2 + 1, x1:x2 + 1]) / np.nansum(
                area_so[y1:y2 + 1, x1:x2 + 1] *
                mask_sss[y1:y2 + 1, x1:x2 + 1])

    # --- N-month running mean (edges keep raw values) ---
    ts_rmean = ts_sss_test.copy()
    ts_rmean[int((N - 1) / 2):-int((N - 1) / 2)] = np.convolve(
        ts_sss_test.copy(), np.ones((N, )) / N, mode='valid')
    sss_monthly = ts_rmean.reshape((int(nt / 12), 12))
    [sss_max_test, sss_min_test] = max_min_select(sss_monthly, int(nt / 12))
    # BUGFIX: np.linspace requires an integer sample count; nt / 12 is a
    # float under Python 3 and raises TypeError. Cast to int, consistent
    # with the int(nt / 12) used for the reshape above.
    [sss_test_trend, sss_test_sig, pvalue
     ] = statistical_f.linear_regression_1D_t_test_with_sample_size_adjusted(
         np.linspace(1, int(nt / 12), int(nt / 12))[0:-2],
         sss_max_test[1:-1] - sss_min_test[1:-1], sig_level)
    return sss_test_trend, sss_test_sig, pvalue
# Smooth the SODA series with an N-month running mean; the first/last
# (N-1)/2 points keep their raw values.
ts_test_rmean[int((N - 1) / 2):-int((N - 1) / 2)] = np.convolve(
    ts_test, np.ones((N, )) / N, mode='valid')
# Fold the smoothed series into (year, month)
ts_monthly_soda = ts_test_rmean.reshape((year_N - 1, 12))
# Yearly maxima and minima of the seasonal cycle
[sss_soda_max, sss_soda_min] = max_min_select(ts_monthly_soda, year_N - 1)
print(sss_soda_max.shape)
print(year[1:-1])
print(sss_soda_max[1:-1])
# Trend (per decade, hence reg * 10) of max, min, and seasonal amplitude.
for label, series in (('SODA max', sss_soda_max),
                      ('SODA min', sss_soda_min),
                      ('SODA max-min', sss_soda_max - sss_soda_min)):
    ts_temp = series.copy()
    [reg, sig, pvalue
     ] = statistical_f.linear_regression_1D_t_test_with_sample_size_adjusted(
         year[2:-1], ts_temp[1:-1], sig_level)
    print(label, str(reg * 10), sig)
# ================================================================
# Containers for per-case CORE-II trends/significance
# (58 columns — presumably years; confirm against the CORE-II period).
run_trend_core2 = np.zeros((len(core2_case_txt), 58))
run_sig_core2 = np.zeros((len(core2_case_txt), 58))
# ================================================================
# Read Argo data
# ================================================================
# For each Argo product: read its yearly max/min series, regress the
# seasonal amplitude (max - min) on a 1..nt axis, and keep the full
# amplitude series of each named product for later plotting.
for II in range(len(argo_case_txt)):
    [ts_max, ts_min] = read_here1(dirname, argo_case_txt[II])
    nt = len(ts_max)
    # sss_argo_trends[II] = np.polyfit(np.linspace(1,nt,nt)[1:-1],ts_max[1:-1]-ts_min[1:-1],1)[0]
    # sss_argo_sig[II] = stats.ttest_ind(np.linspace(1,nt,nt)[1:-1]*0.,ts_max[1:-1]-ts_min[1:-1])[1]
    # First/last years are dropped ([1:-1]) before the regression.
    [
        sss_argo_trends[II], sss_argo_sig[II], pvalue
    ] = statistical_f.linear_regression_1D_t_test_with_sample_size_adjusted(
        np.linspace(1, nt, nt)[1:-1], ts_max[1:-1] - ts_min[1:-1], sig_level)
    # Keep each product's amplitude series under its own name
    # (case order is fixed: CISO, IPRC, ISAS, JAMSTEC, Scripps).
    if II == 0:
        ts_argo_ciso = ts_max - ts_min
    if II == 1:
        ts_argo_iprc = ts_max - ts_min
    if II == 2:
        ts_argo_isas = ts_max - ts_min
    if II == 3:
        ts_argo_jamstec = ts_max - ts_min
    if II == 4:
        ts_argo_scripps = ts_max - ts_min
# Last slot holds the multi-product mean trend.
sss_argo_trends[-1] = sss_argo_trends[:-1].mean()
# Same regression for the CORE-II cases, restricted to the last 31 years.
for II in range(len(core2_case_txt)):
    [ts_max, ts_min] = read_here1(dirname, core2_case_txt[II])
    nt = len(ts_max[-31:])
    # nt = len(ts_max[:])
    [
        sss_core2_trends[II], sss_core2_sig[II], pvalue