def prep_SST_index(index_name, nc, times_SST, calendar_SST, t_units_SST, season):
    """Read one SST index time series from an open netCDF handle and
    reduce it to annual (seasonal) means.

    Returns the pair (annual_mean_series, years) produced by
    calculate_annual_mean.
    """
    monthly_series = nc.variables[index_name][:]
    annual_mean, years = calculate_annual_mean(
        monthly_series, times_SST, calendar_SST, t_units_SST, season=season)
    return annual_mean, years
def prep_index(nc, index_name, times_SST, calendar_SST, t_units_SST, season,
               decadal_mean=False):
    """Read an index time series from an open netCDF handle, take the
    annual (seasonal) mean and standardise it.

    Parameters
    ----------
    nc : open netCDF Dataset handle containing the variable *index_name*
    index_name : name of the variable to read
    times_SST, calendar_SST, t_units_SST : time coordinate and its metadata,
        passed straight through to calculate_annual_mean
    season : season string passed to calculate_annual_mean
    decadal_mean : forwarded to calculate_annual_mean (default False).
        BUGFIX: the original referenced an undefined name ``decadal_mean``,
        raising NameError on every call; it is now an explicit keyword.

    Returns
    -------
    The standardised (zero-mean, unit-variance) annual-mean series.
    """
    index_ts = nc.variables[index_name][:]
    index_annual_mean, years = calculate_annual_mean(
        index_ts, times_SST, calendar_SST, t_units_SST,
        season=season, decadal_mean=decadal_mean)
    # standardise so the index has zero mean and unit standard deviation
    index_annual_mean = ((index_annual_mean - np.mean(index_annual_mean))
                         / np.std(index_annual_mean))
    return index_annual_mean
def prep_index(file_name, ts_name, season, decadal_mean=False):
    """Read a monthly time series from a netCDF file, take the annual
    (seasonal) mean and remove the linear trend.

    Parameters
    ----------
    file_name : path of the netCDF file to open
    ts_name : name of the variable to read
    season : season string passed to calculate_annual_mean
    decadal_mean : forwarded to calculate_annual_mean (default False).
        BUGFIX: the original referenced an undefined name ``decadal_mean``,
        raising NameError on every call; it is now an explicit keyword.

    Returns
    -------
    (ts_am, years) : detrended annual-mean series and the matching years.
    """
    nc = Dataset(file_name, 'r')
    try:
        ts = nc.variables[ts_name][:]
        times = nc.variables['times'][:]
        calendar = nc.variables['times'].calendar
        units = nc.variables['times'].units
    finally:
        # BUGFIX: the original never closed the file handle (resource leak)
        nc.close()
    ts_am, years = calculate_annual_mean(
        ts, times, calendar, units,
        season=season, decadal_mean=decadal_mean)  # annual mean
    # detrend: subtract the least-squares linear fit against the year index
    t = np.arange(years.shape[0])
    slope = stats.linregress(t, ts_am)[0]
    ts_am = ts_am - slope * t
    return ts_am, years
def __init__(self, index_name, nc, times_SST, calendar_SST, t_units_SST, N, season='JJA'):
    """Build an N-point running mean of the annual-mean index *index_name*.

    Reads the monthly series from the open netCDF handle *nc*, averages it
    to annual (seasonal) means, smooths with an N-point running mean, and
    stores the result on ``self.index_running_mean`` with the matching
    ``self.years``.
    """
    monthly_series = nc.variables[index_name][:]
    annual_mean, yrs = calculate_annual_mean(
        monthly_series, times_SST, calendar_SST, t_units_SST, season=season)
    self.index_running_mean = running_mean(annual_mean, N)
    self.years = yrs
        model_name] + '/' + model_name + '/tos/'  # continuation of a full_path assignment cut off above this chunk
    list_of_files = files_in_directory(
        full_path, concat_directory=True,
        include_files_with=ML.ensemble_id[model_name])
    tos_data, lats, lons, levs, times, calendar, t_units = read_in_variable(
        list_of_files[:], 'tos')
else:
    # ERA20C reanalysis branch: fixed single file, SST variable is named 'sst'
    file_name = [
        '/network/group/aopp/met_data/MET003_ERA20C/data/tos/mon/tos_mon_ERA20C_2.5x2.5_189002-201012.nc'
    ]
    tos_data, lats, lons, levs, times, calendar, t_units = read_in_variable(
        file_name, 'sst')

# seasonal annual mean of the SST field
SST_am, years_SST = calculate_annual_mean(tos_data, times, calendar, t_units,
                                          season=season)

# read in THF
if model_name != 'ERA20C':
    # latent heat flux (hfls) from the piControl archive for this model
    data_path0 = '/network/aopp/hera/mad/patterson/CMIP6/data/piControl/'
    full_path = data_path0 + 'hfls/' + ML.model_institute[
        model_name] + '/' + model_name + '/hfls/'
    list_of_files = [
        full_path + 'hfls_Amon_' + model_name + '_piControl_' +
        ML.ensemble_id[model_name] + '_' + season + '.nc'
    ]
    hfls, lats_var, lons_var, levs_var, times_var, calendar_var, t_units_var = read_in_variable(
        list_of_files[:], 'hfls')
    # sensible heat flux (hfss) path — statement continues past this chunk
    full_path = data_path0 + 'hfss/' + ML.model_institute[
        model_name] + '/' + model_name + '/tos/'  # continuation of a full_path assignment cut off above this chunk
    list_of_files = files_in_directory(
        full_path, concat_directory=True,
        include_files_with=ML.ensemble_id[model_name])
    tos_data, lats_SST, lons_SST, levs_SST, times_SST, calendar_SST, t_units_SST = read_in_variable(
        list_of_files[:], 'tos')
else:
    # ERA20C reanalysis branch: fixed single file, SST variable is named 'sst'
    file_name = [
        '/network/group/aopp/met_data/MET003_ERA20C/data/tos/mon/tos_mon_ERA20C_2.5x2.5_189002-201012.nc'
    ]
    # NOTE(review): this branch unpacks into times/calendar/t_units, but the
    # call below uses times_SST/calendar_SST/t_units_SST — for ERA20C those
    # would be stale or undefined. Looks like a bug; confirm against intent.
    tos_data, lats_SST, lons_SST, levs_SST, times, calendar, t_units = read_in_variable(
        file_name, 'sst')

SST_am, years_SST = calculate_annual_mean(tos_data, times_SST, calendar_SST,
                                          t_units_SST, season=season)

# set all unreasonably large values to NaN
SST_am[np.abs(SST_am) > 1e3] = np.nan

# read in THF
if model_name != 'ERA20C':
    # latent heat flux (hfls) from the piControl archive for this model
    data_path0 = '/network/aopp/hera/mad/patterson/CMIP6/data/piControl/'
    full_path = data_path0 + 'hfls/' + ML.model_institute[
        model_name] + '/' + model_name + '/hfls/'
    list_of_files = [
        full_path + 'hfls_Amon_' + model_name + '_piControl_' +
        ML.ensemble_id[model_name] + '_' + season + '.nc'
    ]
# truncate times so that only a common time period is used #earliest_common_time = np.min(times_SST) #if np.min(times_SST) != np.min(times_psl) & (np.min(times_psl) > np.min(times_SST)): earliest_common_time = np.min(times_psl) #latest_common_time = np.max(times_SST) #if np.max(times_SST) != np.max(times_psl) & (np.max(times_psl) < np.max(times_SST)): latest_common_time = np.max(times_psl) #time_mask_SST = (times_SST>=earliest_common_time)&(times_SST<=latest_common_time) #time_mask_psl = (times_psl>=earliest_common_time)&(times_psl<=latest_common_time) #times_SST = times1[time_mask1] #times2 = times2[time_mask2] #variable1_data = variable1_data[time_mask1] #variable2_data = variable2_data[time_mask2] # calculate annual means of fields psl_am, years = calculate_annual_mean(psl_data, times_psl, calendar_psl, t_units_psl, season=season) pr_am, years = calculate_annual_mean(pr_data, times_pr, calendar_pr, t_units_pr, season=season) # perform low pass time filtering on fields N = 10 halfN = int(N / 2) psl_rm = running_mean(psl_am, N) pr_rm = running_mean(pr_am, N) # prepare SST indices
    latest_common_time = np.max(times1)
else:
    # NOTE(review): both branches of this if/else (whose header is above this
    # chunk) assign the same value — one branch likely meant np.max(times2).
    latest_common_time = np.max(times1)

# restrict both variables to the common time period
time_mask1 = (times1 >= earliest_common_time) & (times1 <= latest_common_time)
time_mask2 = (times2 >= earliest_common_time) & (times2 <= latest_common_time)
times1 = times1[time_mask1]
times2 = times2[time_mask2]
variable1_data = variable1_data[time_mask1]
variable2_data = variable2_data[time_mask2]

# calculate annual means
variable1_JJA, years = calculate_annual_mean(variable1_data, times1, calendar1,
                                             t_units1, season='JJA')
variable2_JJA, years = calculate_annual_mean(variable2_data, times2, calendar2,
                                             t_units2, season='JJA')

# perform low pass time filtering
variable1_rm = running_mean(variable1_JJA, N)
variable2_rm = running_mean(variable2_JJA, N)

# only use variable in region over East Asia for MCA
lon_mask_EAsia = (lons1 >= lon_min) & (lons1 <= lon_max)
lat_mask_EAsia = (lats1 >= lat_min) & (lats1 <= lat_max)
    # tail of a plotting function whose def line is above this chunk
    make_plots.plot_box(lon_min=110, lon_max=180, lat_min=20, lat_max=50)
    ax.set_aspect(2)
    return cs

# select season
season = 'JJA'

# Read in HadGEM3 data and calculate annual means
full_path_psl = '/network/group/aopp/predict/AWH007_BEFORT_CMIP6/piControl/MOHC/HadGEM3-GC31-LL/piControl/Amon/psl/gn/latest/'
list_of_files_psl = files_in_directory(full_path_psl, concat_directory=True)
psl_HadGEM, lats_HadGEM, lons_HadGEM, levs_HadGEM, times_HadGEM, calendar_HadGEM, t_units_HadGEM = read_in_variable(
    list_of_files_psl[:], 'psl')
psl_am_HadGEM, years = calculate_annual_mean(psl_HadGEM, times_HadGEM,
                                             calendar_HadGEM, t_units_HadGEM,
                                             season=season)
psl_am_HadGEM = 0.01 * psl_am_HadGEM  # convert to hPa

# Read in ERA-Interim and calculate annual means
file_name = '/network/aopp/preds0/pred/data/Obs/Reanalysis/psl/mon/psl_mon_ERAInterim_1x1_197901-201512.nc'
psl_ERAI, lats_ERAI, lons_ERAI, levs_ERAI, times_ERAI, calendar_ERAI, t_units_ERAI = read_in_variable(
    [file_name], 'msl')
psl_am_ERAI, years = calculate_annual_mean(psl_ERAI, times_ERAI, calendar_ERAI,
                                           t_units_ERAI, season=season)
psl_am_ERAI = 0.01 * psl_am_ERAI  # convert to hPa
# loop over models: plot each model's global-mean surface temperature series;
# the matching except-clause for the try below is past the end of this chunk
for i, model_name in enumerate(model_name_list):
    try:
        #data_path0 = '/network/aopp/hera/mad/patterson/iCMIP6/data/piControl/ts/'
        data_path0 = '/network/group/aopp/predict/AWH007_BEFORT_CMIP6/piControl/'
        #AWI/AWI-CM-1-1-MR/piControl/Amon/tas/gn/latest
        full_path = data_path0 + ML.model_institute[
            model_name] + '/' + model_name + '/piControl/Amon/tas/gn/latest/'
        list_of_files = files_in_directory(full_path, concat_directory=True,
                                           exclude_files_with='ImonAnt')
        ts, lats_ts, lons_ts, levs_ts, times_ts, calendar_ts, t_units_ts = read_in_variable(
            list_of_files[:], 'tas')
        ts_am, years = calculate_annual_mean(ts, times_ts, calendar_ts,
                                             t_units_ts, season=season)
        ts_mean = global_mean(ts_am, lats_ts)

        # one panel per model
        ax = plt.subplot(gs[i])
        plt.title(model_name, fontsize=20)
        plt.plot(ts_mean, color='r')
        plt.xlim(0, 1000)
        if i >= 14:
            # bottom row only
            plt.xlabel('Time (years)', fontsize=20)
        if i % 2 == 0:
            # left column only
            plt.ylabel('Global mean T (K)', fontsize=20)
        plt.xticks(fontsize=20)
        plt.yticks(fontsize=20)