# NOTE(review): whitespace-collapsed fragment, cut mid-statement at both edges
# (starts inside a stats call: `alternative='two-sided')`; the matching `if` for
# the visible `elif test == 'Anderson'` is outside this view). Kept byte-identical.
# Logic: per-EOF two-sample test (KS or Anderson-Darling) between okpc1/okpc2,
# writing statistic and p-value per EOF to `filos`, then exits. The trailing part
# builds DJF seasonal sets of reference labels/PCs; the large commented block is
# an abandoned sliding-window mini-cluster experiment (uses float `numy/2` as a
# slice index — would need `//` if ever revived).
alternative='two-sided') filos.write( 'eof {:3d}: D -> {:8.4f} , pval -> {:12.3e}\n'.format( eof, D, pval)) elif test == 'Anderson': D, critvals, pval = stats.anderson_ksamp( [okpc1[:, eof], okpc2[:, eof]]) filos.write( 'eof {:3d}: D -> {:8.4f} , pval -> {:12.3e}\n'.format( eof, D, pval)) filos.close() sys.exit() # ORA prendo results_ref e faccio anno per anno i miniclusters e faccio il plottino var_set, dates_set = ctl.seasonal_set(nulabs_ref, results_ref['dates'], 'DJF') pcs_set, dates_set = ctl.seasonal_set(results_ref['pcs'], results_ref['dates'], 'DJF') years = [da[0].year for da in dates_set] # for numy in [6, 10, 20, 30]: # for reg in range(4): # figs = [] # #for va, pcs, ye in zip(var_set, pcs_set, years): # #for cyr in np.arange(len(years))[::10][:-1]: # for cyr in np.arange(numy/2, len(years)-numy/2-1): # va = np.concatenate(var_set[cyr-numy/2:cyr+numy/2], axis = 0) # pcs = np.concatenate(pcs_set[cyr-numy/2:cyr+numy/2], axis = 0) # ye = years[cyr]-numy/2 # ye2 = years[cyr]+numy/2 #
# NOTE(review): whitespace-collapsed fragment, cut at both edges (the loop body
# over `i in range(n_bootstrap)` continues past this view). Kept byte-identical.
# Logic: for each member of model `mod`, computes optimal-variance-ratio and
# lag-1 autocorrelation diagnostics, splits PCs/labels into DJF seasons, then
# starts a bootstrap: each iteration draws `n_choice` season indices with
# replacement (np.random.choice default replace=True — correct for bootstrap).
# `allkeysss` and `n_bootstrap`/`n_choice` are defined outside this fragment.
print(mod) whos_mod = all_mods == mod ok_mems = np.sort(all_mems[whos_mod]) bootstraps_all = dict() for mem in ok_mems: print(mod, mem) modmem = mod + '_' + mem results[modmem]['varopt'] = ctl.calc_varopt_molt( results[modmem]['pcs'], results[modmem]['centroids'], results[modmem]['labels']) results[modmem]['autocorr_wlag'] = ctl.calc_autocorr_wlag( results[modmem]['pcs'], results[modmem]['dates'], out_lag1=True) pcs_seas_set, dates_seas_set = ctl.seasonal_set( results[modmem]['pcs'], results[modmem]['dates'], 'DJF') labels_seas_set, dates_seas_set = ctl.seasonal_set( results[modmem]['labels'], results[modmem]['dates'], 'DJF') n_seas = len(dates_seas_set) years_set = np.array([dat[0].year for dat in dates_seas_set]) bootstraps = dict() for nam in allkeysss: bootstraps[nam] = [] t0 = datetime.now() for i in range(n_bootstrap): #if i % 10 == 0: print(i) ok_yea = np.sort(np.random.choice(list(range(n_seas)), n_choice))
# NOTE(review): whitespace-collapsed fragment, cut at the end (EAT scatter/plot
# calls continue past this view). Kept byte-identical.
# Logic: reads 500 hPa geopotential from `ref_file`, restricts to 2015-2100,
# takes DJF seasonal averages, then plots the global-mean trend (anomaly wrt
# first year, with a linear fit) and starts the same for the EAT sector.
# `cbar_range*` are presumably used by map plots outside this fragment.
modnam = 'EC-Earth3P' yearange = (2015, 2100) cbar_range = [0., 3.] cbar_range_notr = [-1, 1] ############################################################################# var, coords, aux_info = ctl.read_iris_nc(ref_file, extract_level_hPa=500) lat = coords['lat'] lon = coords['lon'] dates = coords['dates'] var, dates = ctl.sel_time_range(var, dates, ctl.range_years(yearange[0], yearange[1])) var_set, dates_set = ctl.seasonal_set(var, dates, 'DJF', seasonal_average=True) years = np.array([da.year for da in dates_set]) ############## PLOT GLOBAL TRENDS ###################### fig, ax = plt.subplots() glob_mea = ctl.global_mean(var_set, lat) g0 = glob_mea[0] m, c = ctl.linear_regre(years, glob_mea) ax.scatter(years, glob_mea - g0, label='Global', color='blue') ax.plot(years, c + m * years - g0, color='blue') var_area, lat_area, lon_area = ctl.sel_area(lat, lon, var_set, 'EAT') eat_mea = ctl.global_mean(var_area, lat_area) g0 = eat_mea[0] m, c = ctl.linear_regre(years, eat_mea)
# NOTE(review): whitespace-collapsed fragment. Kept byte-identical.
# Logic: drops historical runs whose label series is much shorter than the
# median (missing years); runs longer than median+100 are assumed to contain
# duplicated years and are rebuilt by keeping only the FIRST season found for
# each year 1950-2004 ([0][0] on np.where).
# NOTE(review): `allyr = np.arange(1950, 2005)` covers 1950-2004 inclusive —
# confirm that excluding yr1=2005 itself is intended, since yr0/yr1 suggest an
# inclusive range. Iterating `tuple(results_hist.keys())` is deliberate: it
# snapshots keys so `del` during iteration is safe.
results_ssp = res['models'] yr0 = 1950 yr1 = 2005 allyr = np.arange(1950, 2005) yr = allyr # Erasing incomplete runs avlen = np.median( [len(results_hist[ke]['labels']) for ke in results_hist.keys()]) for ke in tuple(results_hist.keys()): if len(results_hist[ke]['labels']) < avlen - 1000: del results_hist[ke] elif len(results_hist[ke]['labels']) > avlen + 100: # there is some duplicated year labs, dats = ctl.seasonal_set(results_hist[ke]['labels'], results_hist[ke]['dates'], None) pcs, dats = ctl.seasonal_set(results_hist[ke]['pcs'], results_hist[ke]['dates'], None) yeas = np.array([da[0].year for da in dats]) labs_ok = [] dats_ok = [] pcs_ok = [] for ye in np.arange(1950, 2005): okse = np.where(yeas == ye)[0][0] labs_ok.append(labs[okse]) dats_ok.append(dats[okse]) pcs_ok.append(pcs[okse]) results_hist[ke]['labels'] = np.concatenate(labs_ok) results_hist[ke]['dates'] = np.concatenate(dats_ok) results_hist[ke]['pcs'] = np.concatenate(pcs_ok)
# NOTE(review): whitespace-collapsed fragment, cut at the end (regression/plot
# against an index presumably follows). Kept byte-identical.
# Logic: for each season, computes the reference monthly regime frequency per
# weather regime (4 regimes, 2x2 subplot grid), averages within each season,
# and applies an `n_yr` running mean, dropping NaN edges. FM seasons start one
# year later (1959) because February belongs to the season of the previous
# December — TODO confirm against `ctl.seasonal_set` conventions.
# NOTE(review): `years` is built from `yrdates`, defined outside this fragment;
# the printed lengths suggest freq and years may differ before NaN masking.
for ise, seas in enumerate(seasons): y1 = 1958 if seas == 'FM': y1 = 1959 print(indexname, seas, y1) cart_out = cart_out_ind + seas + '/' if not os.path.exists(cart_out): os.mkdir(cart_out) # plt.ion() fig = plt.figure(figsize=(16, 12)) for reg in range(4): ax = fig.add_subplot(2, 2, reg + 1) #freq_ok, dates_ok = ctl.sel_season(results_ref['monthly_freq']['freq'][reg], results_ref['monthly_freq']['dates'], seas) freq_seas, dates_seas = ctl.seasonal_set( results_ref['monthly_freq']['freq'][reg], results_ref['monthly_freq']['dates'], seas, dates_range=ctl.range_years(y1, 2014)) freq_seas = np.mean(freq_seas, axis=1) years = np.array([da.year for da in yrdates]) yealen = np.arange(len(years)) # freq = np.array(ctl.running_mean(freq_ok, 15)) freq = np.array(ctl.running_mean(freq_seas, n_yr)) oks = ~np.isnan(freq) freq = freq[oks] #amvc = np.array(ctl.running_mean(amv_ref_djf, 15)) print(len(freq), len(years)) years = years[oks]
# NOTE(review): whitespace-collapsed fragment. Kept byte-identical.
# Logic: runs the reference weather-regime computation (WRtool_core) on the
# ERA field without detrending, then stores its EOF solver and cluster patterns
# in `kwar` so subsequent model runs project onto the SAME reference EOF basis
# (`use_reference_eofs=True`). Also splits the reference field into seasons for
# a bootstrap. The commented loop was the start of the SPHINX model processing.
kwar['detrended_eof_calculation'] = False kwar['detrended_anom_for_clustering'] = False kwar['nrsamp_sig'] = 500 results_ref = cd.WRtool_core(var_season, lat, lon, dates_season, area, heavy_output=True, **kwar) kwar['ref_solver'] = results_ref['solver'] kwar['ref_patterns_area'] = results_ref['cluspattern_area'] kwar['use_reference_eofs'] = True var_era_set, dates_era_set = ctl.seasonal_set(var_season, dates_season, season) n_seas = len(var_era_set) years_set = np.array([dat[0].year for dat in dates_era_set]) # LEGGO SPHINX E LANCIO IL BOOTSTRAP var_mod = dict() dates_mod = dict() all_res = dict() #for mod in ['base', 'stoc', 'era']: # print(mod) # if mod in ['base', 'stoc']: # var_mod = [] # dates_mod = [] # for i in range(3): # file_in = cart_in + filenam[mod].format(i)
# NOTE(review): whitespace-collapsed fragment, cut at both edges (the enclosing
# loop that assigns `results_hist[mod]` starts before this view). Kept byte-identical.
# Logic: substitutes the EC-Earth3 r1 historical run for its ssp variants, then
# for each ssp model fits a cubic polynomial (with covariance) to the detrended
# variable concatenated over historical (nominally 1965-2014) and scenario
# (nominally 2015-2099) periods; when series lengths don't match the nominal
# year axes, the years are rebuilt from NDJFM seasonal dates instead.
results_hist[mod] = results_hist['EC-Earth3_r1i1p1f1'] del ece_ssp['EC-Earth3_r1i1p1f1'] results_ssp.update(ece_ssp) for mod in results_ssp.keys(): print(mod) if mod not in results_hist: print('skipping ' + mod) continue bau = results_hist[mod]['var_dtr'] bauda = np.arange(1965, 2015) if len(bau) != len(bauda): dates_set, _ = ctl.seasonal_set(results_hist[mod]['dates'], results_hist[mod]['dates'], 'NDJFM', seasonal_average=False) bauda = np.array([da[0].year for da in dates_set]) gigi = results_ssp[mod]['var_dtr'] gigida = np.arange(2015, 2100) if len(gigi) != len(gigida): dates_set, _ = ctl.seasonal_set(results_ssp[mod]['dates'], results_ssp[mod]['dates'], 'NDJFM', seasonal_average=False) gigida = np.array([da[0].year for da in dates_set]) annette = np.concatenate([bauda, gigida]) cosette = np.concatenate([bau, gigi]) coeffs, covmat = np.polyfit(annette, cosette, deg=3, cov=True)
# NOTE(review): whitespace-collapsed fragment, cut at both edges (the loop over
# runs that defines `na`/`ru` starts before this view). Kept byte-identical.
# Logic: opens all 2D variables for one run as a single xarray multifile
# dataset and stores yearly statistics per variable: 'max' for all, plus
# 'mean'/'min' for non-precipitation variables and 'sum' for 'pr'. ssp585 uses
# member r4 instead of r1 — presumably r1 is unavailable there; verify.
# NOTE(review): `pickle.dump(..., open(..., 'wb'))` leaks the file handle — a
# `with open(...)` would be safer if this fragment is ever cleaned up.
mem = 'r1' if na == 'ssp585': mem = 'r4' fils = np.concatenate( [glob.glob(filna.format(na, mem, miptab, var)) for var in allvars_2D]) kose = xr.open_mfdataset(fils, use_cftime=True) kose = kose.drop_vars('time_bnds') for var in allvars_2D: print(var) if var not in kose: continue yeamean[(ru, var, 'max')] = ctl.seasonal_set(kose[var], season='year', seasonal_stat='max') if var != 'pr': cosoye = kose[var].groupby("time.year").mean().compute() yeamean[(ru, var, 'mean')] = cosoye yeamean[(ru, var, 'min')] = ctl.seasonal_set(kose[var], season='year', seasonal_stat='min') else: yeamean[(ru, var, 'sum')] = ctl.seasonal_set(kose[var], season='year', seasonal_stat='sum') pickle.dump(yeamean, open(cart_out + 'bottino_yeastat_tas.p', 'wb'))
# NOTE(review): whitespace-collapsed fragment, cut at the end (the 50-yr
# std/abs accumulation loops continue past this view). Kept byte-identical.
# Logic: builds a yearly version of each index series: plain calendar-year
# means for most indices, NDJFM means for NAM, MJJAS means for SAM (each
# hemisphere's extended winter). The commented blocks are disabled special
# cases (b990 manual reshape, pi AMV 200-yr oscillation removal via lowpass).
allruK = allru enso_std50 = dict() enso_abs50 = dict() enso_yr = dict() for ru in allruK: if ind not in ['nam', 'sam']: # if ru == 'b990': # years = np.reshape(enso[ru].time.values, (-1, 12))[:, 0].astype(int) # piuz = np.reshape(enso[ru]['tos'].values, (-1, 12)).mean(axis = 1) # piuz = xr.DataArray(data = piuz, dims = ['year'], coords = [years]) # else: piuz = enso[ru].groupby('time.year').mean() elif ind == 'nam': piuz = ctl.seasonal_set(enso[ru], season='NDJFM', seasonal_stat='mean') elif ind == 'sam': piuz = ctl.seasonal_set(enso[ru], season='MJJAS', seasonal_stat='mean') # if ind == 'amv' and ru == 'pi': # # remove 200yr oscillation # piuzlow = ctl.lowpass_butter(piuz, 150) # piuz = piuz-piuzlow enso_yr[ru] = piuz enso_std50[ru] = [] enso_abs50[ru] = []
# NOTE(review): whitespace-collapsed fragment. Kept byte-identical.
# Logic: mirrors the historical-run deduplication elsewhere in this codebase,
# here for the ssp dictionaries (and their rebased twins): runs with fewer than
# 12000 labels are dropped; runs with more than 13000 are assumed to contain
# duplicated years and are rebuilt keeping only the FIRST season per year
# 2015-2099 ([0][0] on np.where). Both results_ssp and results_ssp_rebase are
# fixed in lockstep via the `cosone` loop.
# NOTE(review): only results_ssp drives the length test — assumes the rebased
# dict always has the same lengths; confirm.
for mod in ece_ssp.keys(): results_hist[mod] = results_hist['EC-Earth3_r1i1p1f1'] del ece_ssp['EC-Earth3_r1i1p1f1'] del ece_ssp_rebase['EC-Earth3_r1i1p1f1'] results_ssp.update(ece_ssp) results_ssp_rebase.update(ece_ssp_rebase) # Erasing incomplete runs for ke in tuple(results_ssp.keys()): if len(results_ssp[ke]['labels']) < 12000: del results_ssp[ke] elif len(results_ssp[ke]['labels']) > 13000: # there is some duplicated year for cosone in [results_ssp, results_ssp_rebase]: labs, dats = ctl.seasonal_set(cosone[ke]['labels'], cosone[ke]['dates'], None) pcs, dats = ctl.seasonal_set(cosone[ke]['pcs'], cosone[ke]['dates'], None) yeas = np.array([da[0].year for da in dats]) labs_ok = [] dats_ok = [] pcs_ok = [] for ye in np.arange(2015, 2100): okse = np.where(yeas == ye)[0][0] labs_ok.append(labs[okse]) dats_ok.append(dats[okse]) pcs_ok.append(pcs[okse]) cosone[ke]['labels'] = np.concatenate(labs_ok) cosone[ke]['dates'] = np.concatenate(dats_ok) cosone[ke]['pcs'] = np.concatenate(pcs_ok)
print(mem) fils = glob.glob(filna.format(exp, mem, miptab, var)) if len(fils) == 0: print('NO data for {} {}'.format(var, exp, mem)) continue memok.append(mem) kose = xr.open_mfdataset(fils, use_cftime=True) kose = kose.drop_vars('time_bnds') cosoye = kose[var].groupby("time.year").mean().compute() yeamean[(exp, mem, var)] = cosoye for sea in allseasons: seamean[(exp, mem, var, sea)] = ctl.seasonal_set(kose[var], season=sea, seasonal_stat='mean') cosoye = np.mean([yeamean[(exp, mem, var)] for mem in memok], axis=0) yeamean[(exp, 'ensmean', var)] = cosoye cosoye = np.std([yeamean[(exp, mem, var)] for mem in memok], axis=0) yeamean[(exp, 'ensstd', var)] = cosoye yeamean[(exp, 'members')] = memok pickle.dump( [yeamean, seamean], open(cart_out + 'bottino_yeamean_3_{}_{}.p'.format(exp, var), 'wb'))
# NOTE(review): whitespace-collapsed fragment. Kept byte-identical.
# Logic: loads precomputed frequency/residence-time/pattern dicts, selects the
# ssp585 'tot50' model keys, then computes linear trends (with errors) of the
# first PC for EC-Earth3, both on DJF seasonal means vs years and on raw daily
# values vs day number. Trailing assignments set up CMIP6 monthly-zg paths.
# NOTE(review): the `for nu in range(4): for mod in okmods:` loops never use
# `nu` or `mod` — every iteration recomputes the same EC-Earth3 regression.
# Looks like leftover scaffolding (the body was probably meant to index
# `results_ssp[mod]['pcs'][:, nu]`); flagging rather than changing, since the
# fragment is cut and intent cannot be confirmed from here.
cart_lui = cart + 'Results_v5_rebase/{}_NDJFM/'.format(area) freqs, residtimes, patterns = pickle.load( open(cart_lui + 'allresults_dicts_{}_v3.p'.format(area), 'rb')) okmods = [ ke[1] for ke in freqs if 'ssp585' in ke and 'tot50' in ke and 'all' not in ke and 'rel' not in ke ] print(okmods) #['BCC-CSM2-MR_r1i1p1f1', 'CanESM5_r1i1p1f1', 'CESM2-WACCM_r1i1p1f1\', 'CNRM-CM6-1_r1i1p1f2', 'CNRM-ESM2-1_r1i1p1f2', 'EC-Earth3_r1i1p1f1', 'FGOALS-g3_r1i1p1f1', 'INM-CM4-8_r1i1p1f1', 'INM-CM5-0_r1i1p1f1', 'IPSL-CM6A-LR_r1i1p1f1', 'MIROC6_r1i1p1f1', 'MPI-ESM1-2-HR_r1i1p1f1', 'MRI-ESM2-0_r1i1p1f1', 'UKESM1-0-LL_r1i1p1f2'] for nu in range(4): for mod in okmods: var, dat = ctl.seasonal_set( results_ssp['EC-Earth3_r1i1p1f1']['pcs'][:, 0], results_ssp['EC-Earth3_r1i1p1f1']['dates'], 'DJF', seasonal_average=True) years = np.array([da.year for da in dat]) m, c, err_m, err_c = ctl.linear_regre_witherr(years, var) daynum = np.arange(len(results_ssp['EC-Earth3_r1i1p1f1']['dates'])) m, c, err_m, err_c = ctl.linear_regre_witherr( daynum, results_ssp['EC-Earth3_r1i1p1f1']['pcs']) cartmon_hist = '/data-hobbes/fabiano/CMIP6/historical_mon_zg/' cartmon_ssp = '/data-hobbes/fabiano/CMIP6/ssp585_mon_zg/' filssp = 'zg_Amon_ssp585_{}_{}_2015-2100.nc' filhist = 'zg_{}_mon.nc'
# NOTE(review): whitespace-collapsed fragment, cut at both edges. Kept byte-identical.
# Logic: near-duplicate of the reference-frequency plotting elsewhere in this
# codebase, here applied to a model member (`results[modmem]`) with an explicit
# length guard against the reference index series (`amv_ref_yr`): seasons
# shorter than the reference are skipped with a loud print.
# NOTE(review): here y1 is 1957 (1958 for FM) while the sibling fragment uses
# 1958/1959 — presumably a deliberate per-dataset offset, but worth confirming
# the two scripts are not off by one relative to each other.
# `years` again comes from `yrdates`, defined outside this fragment.
print(seas) y1 = 1957 if seas == 'FM': y1 = 1958 print(indexname, seas, y1) cart_out = cart_out_ind + seas + '/' if not os.path.exists(cart_out): os.mkdir(cart_out) # plt.ion() fig = plt.figure(figsize=(16, 12)) for reg in range(4): ax = fig.add_subplot(2, 2, reg + 1) #freq_ok, dates_ok = ctl.sel_season(results_ref['monthly_freq']['freq'][reg], results_ref['monthly_freq']['dates'], seas) freq_seas, dates_seas = ctl.seasonal_set( results[modmem]['monthly_freq']['freq'][reg], results[modmem]['monthly_freq']['dates'], seas, dates_range=ctl.range_years(y1, y2)) freq_seas = np.mean(freq_seas, axis=1) if len(freq_seas) < len(amv_ref_yr): print('aaaaaaaaaaaa', len(freq_seas), len(amv_ref_yr)) continue years = np.array([da.year for da in yrdates]) yealen = np.arange(len(years)) # freq = np.array(ctl.running_mean(freq_ok, 15)) freq = np.array(ctl.running_mean(freq_seas, n_yr)) oks = ~np.isnan(freq) freq = freq[oks]
# NOTE(review): whitespace-collapsed fragment, cut at both edges (the run loop
# defining `na`/`ru`/`mem` starts before this view; the 3D-variable loop body
# continues after it). Kept byte-identical.
# Logic: per run and 2D variable, loads the multifile dataset, stores yearly
# means and per-season means, and pickles one file per variable; then begins
# the same procedure for 3D variables (again member r4 for ssp585 — presumably
# because r1 is unavailable there; verify).
# NOTE(review): as elsewhere, `pickle.dump(..., open(..., 'wb'))` leaks the
# file handle — prefer a `with` block when this fragment is cleaned up.
#fils = np.concatenate([glob.glob(filna.format(na, mem, miptab, var)) for var in allvars_2D[:-1]]) fils = glob.glob(filna.format(na, mem, miptab, var)) if len(fils) == 0: print('NO data for {} {}'.format(var, ru)) continue kose = xr.open_mfdataset(fils, use_cftime=True) kose = kose.drop_vars('time_bnds') cosoye = kose[var].groupby("time.year").mean().compute() yeamean[(ru, var)] = cosoye for sea in allseasons: seamean[(ru, var, sea)] = ctl.seasonal_set(kose[var], season=sea, seasonal_stat='mean') pickle.dump([yeamean, seamean], open(cart_out + 'bottino_yeamean_3_{}.p'.format(var), 'wb')) # 3D vars for var in allvars_3D: yeamean = dict() seamean = dict() print(var) for na, ru, col in zip(allnams, allru, colors): print(ru) mem = 'r1' if na == 'ssp585': mem = 'r4'