# Weather-regime setup: 4 clusters per area, with regime names for the
# Euro-Atlantic (EAT) and Pacific-North-American (PNA) sectors.
numclus = 4
reg_names_area = dict()
reg_names_area['EAT'] = ['NAO+', 'SBL', 'AR', 'NAO-']
reg_names_area['PNA'] = ['PT', 'PNA+', 'PNA-', 'AR']

allssps = 'ssp126 ssp245 ssp370 ssp585'.split()

area = 'EAT'
for area in ['EAT', 'PNA']:
    pdfssp = dict()

    # Per-area output directory (cart_out_orig is defined elsewhere in the file).
    cart_out = cart_out_orig + '{}_NDJFM/'.format(area)
    ctl.mkdir(cart_out)

    # Historical results: refCLUS version plus the refEOF variant.
    results_hist, results_ref = ctl.load_wrtool(file_hist.format(area))
    res_hist_refEOF, _ = ctl.load_wrtool(file_hist_refEOF.format(area))

    # Erasing incomplete runs: fewer than 7000 daily labels means the run
    # does not cover the full period (threshold used consistently below).
    # Iterate over a tuple copy of the keys since entries are deleted.
    for ke in tuple(results_hist.keys()):
        if len(results_hist[ke]['labels']) < 7000:
            del results_hist[ke]

    results_ssp = dict()
    for ssp in allssps:
        results_ssp[ssp], _ = ctl.load_wrtool(gen_file_ssp.format(ssp, area))

        # Erasing incomplete runs (same 7000-label threshold as for hist).
        for ke in tuple(results_ssp[ssp].keys()):
            if len(results_ssp[ssp][ke]['labels']) < 7000:
                del results_ssp[ssp][ke]
#dtrtyp = 'light' dtrtyp = 'histrebase' cart_out_orig = '/home/fabiano/Research/lavori/CMIP6/Results_v6_eceens/' ctl.mkdir(cart_out_orig) cart_in = '/data-hobbes/fabiano/WR_CMIP6/' #file_hist = cart_in + 'out_NEW_cmip6_hist_NDJFM_{}_4clus_4pcs_1964-2014_refCLUS_dtr_light.p' file_hist = cart_in + 'out_eceens_hist_NDJFM_{}_4clus_4pcs_1964-2014_refCLUS_dtr.p' file_hist_refEOF = cart_in + 'out_eceens_hist_NDJFM_{}_4clus_4pcs_1964-2014_refEOF_dtr.p' gen_file_ssp = cart_in + 'out_eceens_{}_NDJFM_{}_4clus_4pcs_2015-2100_refCLUS_dtr_reb.p' gen_file_ssp_noreb = cart_in + 'out_eceens_{}_NDJFM_{}_4clus_4pcs_2015-2100_refCLUS_dtr.p' area = 'EAT' ssp = 'ssp585' reshist, resref = ctl.load_wrtool(file_hist.format(area)) #reshist_re, _ = ctl.load_wrtool(file_hist.format(area)) resssp, _ = ctl.load_wrtool(gen_file_ssp.format(ssp, area)) resssp_noreb, _ = ctl.load_wrtool(gen_file_ssp_noreb.format(ssp, area)) histbases = [] for mod in reshist.keys(): histbases.append( np.mean(reshist[mod]['climate_mean'][:, 50:70, -8], axis=0)) lat = reshist[mod]['lat'][50:70] lon = reshist[mod]['lon'][50:70] sspbases = [] for mod in resssp_noreb.keys(): sspbases.append(
# Global matplotlib font sizes for all figures produced below.
plt.rcParams['xtick.labelsize'] = 15
plt.rcParams['ytick.labelsize'] = 15
titlefont = 24
plt.rcParams['figure.titlesize'] = titlefont
plt.rcParams['axes.titlesize'] = 18
plt.rcParams['axes.labelsize'] = 18

#############################################################################
cart_out = '/home/fedef/Research/lavori/CMIP6/Clusters_of_regime_hist/'
ctl.mkdir(cart_out)

for area in ['EAT', 'PNA']:
    filox = '/home/fedef/Research/lavori/CMIP6/cmip6_hist/out_cmip6_hist_NDJFM_{}_4clus_4pcs_1964-2014_refEOF_dtr.p'.format(
        area)
    coso, coso_ref = ctl.load_wrtool(filox)

    # Grid coordinates taken from one member; assumes all models in `coso`
    # share the same lat/lon grid — TODO confirm.
    lat = coso['BCC-CSM2-MR_r1i1p1f1']['lat']
    lon = coso['BCC-CSM2-MR_r1i1p1f1']['lon']

    for reg in range(4):
        # One cluster pattern per model for this regime, then cluster the
        # models themselves on those patterns (ensemble clustering).
        cluspatterns = [coso[mo]['cluspattern'][reg] for mo in coso.keys()]
        centroids, labels, patts, repres, distances = cd.EnsClus_light(
            cluspatterns, lat, numclus=4, numpcs=4, flag_perc=False, perc=None)

        filename = cart_out + 'clu2_{}_{}.pdf'.format(area, reg)
        # (call truncated in this chunk; remaining arguments are past this view)
        ctl.plot_multimap_contour(patts, lat, lon, filename,
# Central lat/lon used for map projections, per area.
clatlo = dict()
clatlo['EAT'] = (70., -20.)
clatlo['PNA'] = (70., -120.)

#allssps = 'ssp119 ssp126 ssp245 ssp370 ssp585'.split()
#allssps = 'ssp126 ssp245 ssp370 ssp585'.split()
allssps = ['ssp585']

#oknam = ['EC-Earth3_r1i1p1f1', 'EC-Earth3_r4i1p1f1', 'MPI-ESM1-2-LR_r1i1p1f1']
ttests = dict()
for area in ['EAT', 'PNA']:
    cart_out = cart_out_orig + '{}_NDJFM/'.format(area)
    ctl.mkdir(cart_out)

    # Multi-model historical mean regime frequency, then drop the raw
    # results to free memory.
    results_hist, results_ref = ctl.load_wrtool(file_hist_tot.format(area))
    histme = np.mean(
        [results_hist[ke]['freq_clus'] for ke in results_hist.keys()], axis=0)
    del results_hist

    # Load hist and ssp585 results for each configuration in `tips`
    # (defined elsewhere in the file), keyed by (tip, experiment).
    rescoso = dict()
    for tip in tips:
        rescoso[(tip, 'hist')], _ = ctl.load_wrtool(fil_hist.format(tip, tip, area))
        rescoso[(tip, 'ssp585')], _ = ctl.load_wrtool(
            fil_ssp.format(tip, tip, area))

    # Concatenate hist and ssp
    #for cosone, histmem in zip([ece_ssp, ece_ssp_r4, mpi_ssp], ['EC-Earth3_r1i1p1f1', 'EC-Earth3_r4i1p1f1', 'MPI-ESM1-2-LR_r1i1p1f1']):
    resdict = dict()
    # (loop truncated in this chunk; its body is past this view)
    for tip in tips:
titlefont = 24
plt.rcParams['figure.titlesize'] = titlefont
plt.rcParams['axes.titlesize'] = 18
plt.rcParams['axes.labelsize'] = 18

#############################################################################
# Host-dependent paths: 'hobbes' is the analysis server, 'ff-clevo' a laptop.
if os.uname()[1] == 'hobbes':
    cart_in = '/home/fabiano/Research/lavori/WeatherRegimes/'
    cart_out = '/home/fabiano/Research/lavori/prima_D45/'
elif os.uname()[1] == 'ff-clevo':
    # NOTE(review): cart_in is NOT set on this branch, so the
    # ctl.load_wrtool(cart_in + ...) calls below would raise NameError on
    # 'ff-clevo' — confirm whether this branch is still used.
    cart_out = '/home/fedefab/Scrivania/Research/Post-doc/lavori/prima_D45/'

fil_pres = 'prima_D45_pres/out_prima_D45_pres_DJF_EAT_4clus_4pcs_1979-2014_refEOF_dtr.p'
fil_fut = 'prima_D45_fut/out_prima_D45_fut_DJF_EAT_4clus_4pcs_2015-2050_refEOF_dtr.p'

# Present (1979-2014) and future (2015-2050) regime results.
results_pres, results_ref = ctl.load_wrtool(cart_in + fil_pres)
results_fut, _ = ctl.load_wrtool(cart_in + fil_fut)

cart_data = '/nas/PRIMAVERA/Stream1/'
filtas = 'highresSST-{}/{}/{}/day/{}/{}_day_{}_highresSST-{}_{}_*_r25_rc.nc'
#filtas.format(temp, mod, mem, varnam, varnam, mod, temp, mem)

# composites = dict()
# mod = 'ref'
# temp = 'present'

# file_ref = dict()
# file_ref['tas'] = '/data-hobbes/fabiano/OBS/ERA/ERAInterim/ERAInt_daily_1979-2018_167_r25.nc'
# file_ref['pr'] = '/data-hobbes/fabiano/OBS/ERA/ERAInterim/ERAInt_daily_1979-2018_228_pr_daysum_ok_r25.nc'

# for varnam in ['tas', 'pr']:
#     var, coords, aux_info = ctl.read_iris_nc(file_ref[varnam])
#     var_season, dates_season = ctl.sel_season(var, coords['dates'], 'DJF')
# Refit result files (rebased variants) and the ERA reference file.
file_refit = cart_in + 'out_NEW_cmip6_{}_NDJFM_{}_4clus_4pcs_2015-2100_refCLUS_dtr_refit.p'
file_refit_2 = cart_in + 'out_NEW_cmip6_{}_NDJFM_{}_4clus_4pcs_2015-2100_refCLUS_dtr_refit_rebasetot.p'
fil_ece_ssp = cart_in + 'out_eceens_{}_NDJFM_{}_4clus_4pcs_2015-2100_refCLUS_dtr.p'
filref = '/home/fabiano/Research/lavori/WeatherRegimes/ERA_ref_r25_v4/out_ERA_NDJFM_{}_4clus_4pcs_1964-2014_dtr.p'

numclus = 4
reg_names_area = dict()
reg_names_area['EAT'] = ['NAO+', 'SBL', 'AR', 'NAO-']
reg_names_area['PNA'] = ['PT', 'PNA+', 'PNA-', 'AR']

area = 'EAT'
allssps = 'ssp126 ssp245 ssp370 ssp585'.split()
for area in ['EAT', 'PNA']:
    results_hist, results_ref = ctl.load_wrtool(file_hist.format(area))
    # Replace the reference with the full pickled ERA result
    # (overwrites the one returned by load_wrtool above).
    results_ref = pickle.load(open(filref.format(area), 'rb'))
    # del results_ref['var_glob']
    # del results_ref['var_area']
    # del results_ref['solver']

    # Strip bulky fields from each model entry to reduce memory/size;
    # var_glob is intentionally kept here (see commented line).
    for mod in results_hist.keys():
        #del results_hist[mod]['var_glob']
        del results_hist[mod]['var_area']
        del results_hist[mod]['solver']

    # Bundle models + reference into a single dict.
    restot = dict()
    restot['models'] = results_hist
    restot['reference'] = results_ref

    # (loop truncated in this chunk; its body continues past this view)
    for ssp in ['ssp585']:
        print(ssp)
# NOTE(review): this chunk opens inside an outer `if` whose header is not
# visible here; the dedented `else` below pairs with that outer `if`.
    # Load the ERA reference dataset, handling list, glob-pattern, and
    # plain-path forms of the 'ERA_ref_orig' input.
    if type(inputs['ERA_ref_orig']) is list:
        ref_cube = xr.load_dataset(inputs['ERA_ref_orig'][0])
    elif '*' in inputs['ERA_ref_orig']:
        # Wildcard path: expand it and take the first match.
        filsref = glob.glob(inputs['ERA_ref_orig'])
        ref_cube = xr.load_dataset(filsref[0])
    else:
        ref_cube = xr.load_dataset(inputs['ERA_ref_orig'])
else:
    # No reference available in this configuration.
    ref_cube = None

if inputs['rebase_to_historical']:
    if inputs['file_hist_rebase'] is not None:
        print('Loading historical climate mean from {}\n'.format(inputs['file_hist_rebase']))
        # climate_mean and its dates, keyed by model (or by full run key).
        clim_rebase = dict()
        dates_clim_rebase = dict()
        results_hist, _ = ctl.load_wrtool(inputs['file_hist_rebase'])

        if inputs['cmip6_naming']:
            # For consistency, always use the first member of the historical run.
            # Keys look like '<model>_<member>'; prefer an 'r1i...' member
            # when present, otherwise take the first available one.
            okmodhist = np.unique([ke.split('_')[0] for ke in results_hist])
            for mod in okmodhist:
                okke = [ke for ke in results_hist if ke.split('_')[0] == mod]
                if np.any(['r1i' in ke for ke in okke]):
                    ke = [kee for kee in okke if 'r1i' in kee][0]
                else:
                    ke = okke[0]
                clim_rebase[mod] = results_hist[ke]['climate_mean']
                dates_clim_rebase[mod] = results_hist[ke]['climate_mean_dates']
        else:
            # Non-CMIP6 naming: keep one entry per full run key.
            for ke in results_hist:
                clim_rebase[ke] = results_hist[ke]['climate_mean']
                dates_clim_rebase[ke] = results_hist[ke]['climate_mean_dates']
# # cmean, dates_cm, _ = ctl.daily_climatology(zg_dtr, dates_seas, window = 20) # climmeans.append(cmean) # # climate_mean[mod] = np.mean(climmeans, axis = 0) # climate_std[mod] = np.std(climmeans, axis = 0) # # climate_mean_dates[mod] = dates_cm # num_members[mod] = len(climmeans) # # pickle.dump([climate_mean, climate_mean_dates, climate_std, num_members], open(cart_out + 'climate_mean_hist_p2.p', 'wb')) climate_mean, climate_mean_dates, climate_std, num_members = pickle.load(open(cart_out + 'climate_mean_hist_p2.p', 'rb')) for area in ['EAT', 'PNA']: res_old, _ = ctl.load_wrtool('/data-hobbes/fabiano/WR_CMIP6/out_NEW_cmip6_hist_NDJFM_{}_4clus_4pcs_1964-2014_refCLUS_dtr_light.p'.format(area)) figs = [] for mod in okmods_mo: modmem = [ke for ke in res_old.keys() if mod in ke][0] lat = res_old[modmem]['lat'] lon = res_old[modmem]['lon'] newcoso = np.mean(climate_mean[mod], axis = 0).squeeze() oldcoso = np.mean(res_old[modmem]['climate_mean'], axis = 0) fig = ctl.plot_map_contour(newcoso-oldcoso, lat, lon, filename = None, visualization = 'standard', central_lat_lon = None, cmap = 'RdBu_r', title = mod, xlabel = None, ylabel = None, cb_label = None, cbar_range = (-60, 60), plot_anomalies = True, n_color_levels = 21, draw_contour_lines = False, n_lines = 5, color_percentiles = (0,100), bounding_lat = 30, plot_margins = area, add_rectangles = None, draw_grid = True, plot_type = 'filled_contour', verbose = False, lw_contour = 0.5) figs.append(fig) ctl.plot_pdfpages(cart_out + 'map_ensrebase_diff_{}.pdf'.format(area), figs) for area in ['EAT', 'PNA']:
# Central lat/lon for map projections (clatlo presumably created above
# this chunk).
clatlo['EAT'] = (70., -20.)
clatlo['PNA'] = (70., -120.)

#allssps = 'ssp119 ssp126 ssp245 ssp370 ssp585'.split()
allssps = 'ssp126 ssp245 ssp370 ssp585'.split()

ttests = dict()
cosette = dict()

area = 'EAT'
for area in ['EAT', 'PNA']:
    cart_out = cart_out_orig + '{}_NDJFM/'.format(area)
    ctl.mkdir(cart_out)

    reg_names = reg_names_area[area]

    results_hist, results_ref = ctl.load_wrtool(file_hist.format(area))

    # Precomputed pickles from the v5/cmip5 analysis directories
    # (cart_v5 / cart_cmip5 are format templates defined elsewhere).
    trend_ssp, residtime_ssp = pickle.load(
        open(
            cart_v5.format(area) + 'trends_wrfreq_e_restime_{}.p'.format(area),
            'rb'))
    seasfreq, runfreq = pickle.load(
        open(cart_v5.format(area) + 'seasfreqs_{}_v4.p'.format(area), 'rb'))
    freqs, residtimes, patterns, num_event = pickle.load(
        open(
            cart_v5.format(area) + 'allresults_dicts_{}_v3.p'.format(area),
            'rb'))
    freqs_cmip5, trend_ssp_cmip5, residtimes_cmip5, num_event_cmip5 = pickle.load(
        open(cart_cmip5.format(area) + 'freqs_cmip5_{}.p'.format(area), 'rb'))
    # (statement truncated in this chunk; it continues past this view)
    seasfreq_cmip5, runfreq_cmip5 = pickle.load(
# PNA regime names (here the fourth regime is 'BR', unlike the 'AR' used
# in other chunks of this file — presumably intentional; verify).
reg_names_area['PNA'] = ['PT', 'PNA+', 'PNA-', 'BR']

clatlo = dict()
clatlo['EAT'] = (70., -20.)
clatlo['PNA'] = (70., -120.)

#allssps = 'ssp119 ssp126 ssp245 ssp370 ssp585'.split()
#allssps = 'ssp126 ssp245 ssp370 ssp585'.split()
allssps = ['ssp585']

ttests = dict()
for area in ['EAT', 'PNA']:
    cart_out = cart_out_orig + '{}_NDJFM/'.format(area)
    ctl.mkdir(cart_out)

    results_hist, results_ref = ctl.load_wrtool(file_hist.format(area))

    # Erasing incomplete runs (fewer than 7000 daily labels); iterate over
    # a tuple copy of the keys since entries are deleted.
    for ke in tuple(results_hist.keys()):
        if len(results_hist[ke]['labels']) < 7000:
            del results_hist[ke]

    # SSP results: plain, rebased, and the two refit variants
    # (`ssp` is presumably set above this chunk — verify).
    results_ssp, _ = ctl.load_wrtool(file_ssp.format(ssp, area))
    results_ssp_rebase, _ = ctl.load_wrtool(file_ssp_rebase.format(ssp, area))
    results_refit, _ = ctl.load_wrtool(file_refit.format(ssp, area))
    results_refit2, _ = ctl.load_wrtool(file_refit2.format(ssp, area))

    # Adding the ece ensemble
    ece_ssp, _ = ctl.load_wrtool(fil_ece_ssp.format(ssp, area))
    ece_ssp_rebase, _ = ctl.load_wrtool(fil_ece_ssp_rebase.format(ssp, area))
####################################### cart_in = '/home/fabiano/Research/lavori/prima_regimes_KS/' plot_sig = True plot_mean = True for numclus in [3, 4, 5, 6]: cart_out = cart_in + 'plots_k{}/'.format(numclus) if not os.path.exists(cart_out): os.mkdir(cart_out) #filon = open(cart_in + 'res_bootstrap_v7_KJ_k{}.p'.format(numclus), 'rb') filon_ref = open(cart_in + 'res_bootstrap_v7_KJ_ref_k{}.p'.format(numclus), 'rb') filogen = cart_in + 'out_prima_coup_v7_DJF_EAT_4clus_4pcs_1957-2014_refEOF_FILTEREDKJ_k{}.p'.format( numclus) results, results_ref = ctl.load_wrtool(filogen) popke = [ 'HadGEM3-GC31-LL_r1i1p2f1', 'EC-Earth3P_r1i1p1f1', 'EC-Earth3P-HR_r1i1p1f1' ] for ke in popke: if ke in results: results.pop(ke) allresmembers = list(results.keys()) all_mods = np.array([ke.split('_')[0] for ke in results.keys()] + ['ERA']) all_mems = np.array([ke.split('_')[1] for ke in results.keys()] + ['0']) del results
# SSP result files: plain, histrebase, and light variants, plus the ERA
# reference pickle.
gen_file_ssp = cart_in + 'out_NEW_cmip6_{}_NDJFM_{}_4clus_4pcs_2015-2100_refCLUS_dtr.p'
file_rebase = cart_in + 'out_NEW_cmip6_{}_NDJFM_{}_4clus_4pcs_2015-2100_refCLUS_dtr_histrebase.p'
file_light = cart_in + 'out_NEW_cmip6_{}_NDJFM_{}_4clus_4pcs_2015-2100_refCLUS_dtr_light.p'
filref = '/home/fabiano/Research/lavori/WeatherRegimes/ERA_ref_r25_v4/out_ERA_NDJFM_{}_4clus_4pcs_1964-2014_dtr.p'

numclus = 4
reg_names_area = dict()
reg_names_area['EAT'] = ['NAO+', 'SBL', 'AR', 'NAO-']
reg_names_area['PNA'] = ['PT', 'PNA+', 'PNA-', 'AR']

area = 'EAT'
allssps = 'ssp126 ssp245 ssp370 ssp585'.split()
for area in ['EAT', 'PNA']:
    print(area)
    results_hist, results_ref = ctl.load_wrtool(file_hist.format(area))
    # Replace the reference with the full pickled ERA result and strip
    # bulky fields, keeping only the solver.
    results_ref = pickle.load(open(filref.format(area), 'rb'))
    del results_ref['var_glob']
    del results_ref['var_area']
    ref_solver = results_ref['solver']

    # Strip bulky fields from each model entry, except for
    # MPI-ESM1-2-LR_r1i1p1f1 which is kept intact — presumably needed in
    # full later; verify.
    for mod in results_hist.keys():
        if mod == 'MPI-ESM1-2-LR_r1i1p1f1':
            continue
        del results_hist[mod]['var_glob']
        del results_hist[mod]['var_area']
        del results_hist[mod]['solver']

    results_ssp, _ = ctl.load_wrtool(gen_file_ssp.format('ssp585', area))
    # (loop truncated in this chunk; its body continues past this view)
    for mod in results_ssp.keys():
        if mod == 'MPI-ESM1-2-LR_r1i1p1f1':
# Colors for each (mip, area) combination, drawn from a 7-color set.
colormip = dict()
colormip[('cmip5', 'EAT')] = ctl.color_set(7)[4]
colormip[('cmip6', 'EAT')] = ctl.color_set(7)[0]
colormip[('cmip5', 'PNA')] = ctl.color_set(7)[3]
colormip[('cmip6', 'PNA')] = ctl.color_set(7)[6]

area = 'EAT'

plocos = dict()
res_short = dict()
for area in ['EAT', 'PNA']:
    print(area)
    cart_out = cart_out_orig + '{}_NDJFM/'.format(area)
    ctl.mkdir(cart_out)

    # Historical results for CMIP6 and CMIP5, each in refCLUS and refEOF
    # flavors (ye1/ye2 are the year bounds, defined elsewhere).
    results_hist, results_ref = ctl.load_wrtool(
        file_hist.format(area, ye1, ye2))
    results_hist_refEOF, _ = ctl.load_wrtool(
        file_hist_refEOF.format(area, ye1, ye2))
    results_hist_cmip5, _ = ctl.load_wrtool(
        file_hist_cmip5.format(area, ye1, ye2))
    results_hist_refEOF_cmip5, _ = ctl.load_wrtool(
        file_hist_refEOF_cmip5.format(area, ye1, ye2))

    # Collect the four datasets under uniform keys for the analysis below.
    resdict = dict()
    resdict['cmip6'] = results_hist
    resdict['cmip5'] = results_hist_cmip5
    resdict['cmip6_refEOF'] = results_hist_refEOF
    resdict['cmip5_refEOF'] = results_hist_refEOF_cmip5

    var_ratio = dict()
    freqbias = dict()
#cart = '/home/fabiano/Research/lavori/WeatherRegimes/ERA5/' cart = '/home/fedef/Research/lavori/valembo_era5/' #fil = 'out_ERA5_{}_{}_4clus_55perc_allyrs.p' fil = 'ERA5_v90/out_ERA5_v90_{}_{}_4clus_90perc_allyrs.p' filmask = cart + 'mask_1D_{}_{}.p' reg_events = dict() regime_ref = dict() figs_scatter = [] figs_clouds = [] for area in ['EAT', 'PNA', 'NML']: for season in ['DJF', 'JJA']: resu, resu_ref = ctl.load_wrtool( cart + fil.format(season, area)) # carico i regimi del wrtool gigi = resu['ERA5'] # conto i giorni if season == 'DJF': nye = 33 # from 1979 to 2012 lensea = 90 add = 9 + 31 skip = 59 elif season == 'JJA': nye = 34 lensea = 92 add = 0 skip = 0 for ke in ['labels', 'dist_centroid', 'pcs', 'dates']:
cart_in = '/home/fedef/Research/lavori/tipes/'

# Regime pattern names per season.
patnames = dict()
patnames['DJFM'] = ['NAO+', 'SBL', 'NAO-', 'AR']
patnames['JJAS'] = ['SBL', 'NAO+', 'NAO-/AL', 'AR']

#for tip in ['refCLUS', 'refEOF', 'refCLUS_dtr_reb']:
tip = 'refCLUS_dtr_reb'

# Experiment keys and their plot colors (parallel lists).
keall = ['pi', 'ho03', 'c3r5']  #, 'eta']
colorz = ['steelblue', 'indianred', 'forestgreen']  #, 'orange']

# read output
for seas in ['JJAS', 'DJFM']:
    resu, resu_ref = ctl.load_wrtool(
        cart_in +
        'out_tipes_hosing_projpi_{}_EAT_4clus_4pcs_allyrs_{}.p'.format(
            seas, tip))

    # calculate frequency in running windows
    # 30-yr windows, distribution of seasonal frequency
    # Wrap each run's seasonal regime-frequency series in a labeled
    # DataArray (regime index x year).
    timeseries = dict()
    for ke in resu.keys():
        timeseries[ke] = xr.DataArray(resu[ke]['freq_clus_seasonal'],
                                      dims=('reg', 'time'),
                                      coords={
                                          'reg': [0, 1, 2, 3],
                                          'time':
                                          resu[ke]['freq_clus_seasonal_years']
                                      })

    # resu2, resu_ref2 = ctl.load_wrtool(cart_in + 'out_tipes_nnetau_proj_NDJFM_EAT_4clus_4pcs_allyrs_{}.p'.format(tip))
# NOTE(review): this chunk opens inside a list literal whose opening
# bracket (and variable name) are above this view — presumably the list
# of experiment names matching `colorz` below.
    'piControl', 'stabilization-ssp585-2025', 'stabilization-ssp585-2050',
    'stabilization-ssp585-2100'
]
colorz = ['black', 'forestgreen', 'orange', 'violet']

# Regime pattern names per season.
patnames = dict()
patnames['DJFM'] = ['NAO+', 'SBL', 'NAO-', 'AR']
patnames['JJAS'] = ['SBL', 'NAO+', 'NAO-/AL', 'AR']

tip = 'refCLUS_dtr_reb'

# read output
for seas in ['JJAS', 'DJFM']:
    resu, resu_ref = ctl.load_wrtool(
        cart_in +
        'out_bottino_{}_EAT_4clus_4pcs_allyrs_{}.p'.format(seas, tip))

    # calculate frequency in running windows
    # 30-yr windows, distribution of seasonal frequency
    timeseries = dict()
    for ke in resu.keys():
        # Compute the seasonal regime frequencies on the fly when the
        # loaded result does not already contain them.
        if 'freq_clus_seasonal' not in resu[ke]:
            resu[ke]['freq_clus_seasonal'], resu[ke][
                'freq_clus_seasonal_years'] = ctl.calc_seasonal_clus_freq(
                    resu[ke]['labels'], resu[ke]['dates'], 4)

        # (statement truncated in this chunk; the coords dict continues
        # past this view)
        timeseries[ke] = xr.DataArray(resu[ke]['freq_clus_seasonal'],
                                      dims=('reg', 'time'),
                                      coords={
                                          'reg': [0, 1, 2, 3],
                                          'time':