### detrended
solver_exp_dtr = ctl.eof_computation(gi11tos_dtr, latitude=gigi.lat.values)
okmatch_dtr, simatch_dtr = ctl.match_patterns(obseofs_dtr, solver_exp_dtr.eofs(eofscaling=2)[:n_ref+10], latitude=lat, ignore_global_sign=True)

# expeofs_dtr = solver_exp_dtr.eofs(eofscaling=2)[:n_ref+10][okmatch_dtr]
expeofs_dtr = solver_exp_dtr.eofs(eofscaling=2)[:n_ref]

filout2 = cart_out + 'tos_eofs_exp_detrended.pdf'
ctl.plot_multimap_contour(expeofs_dtr, lat, lon, filout2, plot_anomalies=True, cbar_range=(-0.6, 0.6), subtitles=['eof {}'.format(i) for i in range(n_ref)], cb_label='T (K)')

#### matched diffs
expeofs = simatch[:, np.newaxis, np.newaxis] * solver_exp.eofs(eofscaling=2)[:n_ref+10][okmatch]
expeofs_dtr = simatch_dtr[:, np.newaxis, np.newaxis] * solver_exp_dtr.eofs(eofscaling=2)[:n_ref+10][okmatch_dtr]

print('Ok match: ', okmatch)
rcorrs = [ctl.Rcorr(ob, ex, latitude=lat) for ob, ex in zip(obseofs, expeofs)]
rmss = [ctl.E_rms(ob, ex, latitude=lat) for ob, ex in zip(obseofs, expeofs)]
print('Rcorrs: ', rcorrs)
print('RMSs: ', rmss)

print('Ok match dtr: ', okmatch_dtr)
rcorrs_dtr = [ctl.Rcorr(ob, ex, latitude=lat) for ob, ex in zip(obseofs_dtr, expeofs_dtr)]
rmss_dtr = [ctl.E_rms(ob, ex, latitude=lat) for ob, ex in zip(obseofs_dtr, expeofs_dtr)]
print('Rcorrs: ', rcorrs_dtr)
print('RMSs: ', rmss_dtr)

# signs = np.array([np.sign(ctl.Rcorr(ob, ex, latitude=lat)) for ob, ex in zip(obseofs, expeofs)])
filout3 = cart_out + 'tos_eofs_diff_obs-exp_withtrend.pdf'
ctl.plot_multimap_contour(expeofs - obseofs, pino.lat.values, pino.lon.values, filout3, plot_anomalies=True, cbar_range=(-0.6, 0.6), subtitles=['eof {}'.format(i) for i in range(n_ref)], cb_label='T (K)')
# signs = np.array([np.sign(ctl.Rcorr(ob, ex, latitude=lat)) for ob, ex in zip(obseofs_dtr, expeofs_dtr)])
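# NOTE: the EOF comparison above relies on ctl.Rcorr and ctl.E_rms with a latitude argument,
# which presumably apply cos(lat) area weights. A minimal stand-alone sketch of that kind of
# metric (hypothetical helper names, numpy only) for reference, not part of the ctl library:
import numpy as np

def _area_weighted_patcor(field_a, field_b, lats):
    """Pattern correlation between two (lat, lon) fields, weighted by cos(lat)."""
    w = np.cos(np.deg2rad(lats))[:, np.newaxis] * np.ones(field_a.shape)
    a = field_a - np.average(field_a, weights=w)
    b = field_b - np.average(field_b, weights=w)
    cov = np.average(a * b, weights=w)
    return cov / np.sqrt(np.average(a**2, weights=w) * np.average(b**2, weights=w))

def _area_weighted_rms(field_a, field_b, lats):
    """Root-mean-square difference between two (lat, lon) fields, weighted by cos(lat)."""
    w = np.cos(np.deg2rad(lats))[:, np.newaxis] * np.ones(field_a.shape)
    return np.sqrt(np.average((field_a - field_b)**2, weights=w))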
    zi = kufu(np.vstack([xi_grid.flatten(), yi_grid.flatten(), zi_grid.flatten()]))
    zi = zi / np.max(zi)

    relent = stats.entropy(zi, zi_ref[reg])
    relent_all.append(relent)

bootstraps['relative_entropy'].append(relent_all)
# bootstraps['RMS'].append([ctl.distance(ce, refce) for ce, refce in zip(centroids, ref_cen)])
bootstraps['patcor'].append([ctl.Rcorr(ce, refce) for ce, refce in zip(centroids, ref_cen)])

# redo the same for filtered regimes
filt_labels = ctl.regime_filter_long(labels, dates, days_thres=5)

relent_all = []
filt_centroids = []
for reg in range(4):
    okclu = filt_labels == reg
    okpc = pcs[okclu, :]
    kufu = ctl.calc_pdf(okpc[:, :3].T)

    zi = kufu(np.vstack([xi_grid.flatten(), yi_grid.flatten(),
    all_stoc_1 = np.concatenate(
        [serie[(var1, lb, ens)] for ens in ensmems[3:]])
    all_base_1 = np.concatenate(
        [serie[(var1, lb, ens)] for ens in ensmems[:3]])

    for var2 in ['hcc', 'mcc', 'lcc', 'tcw', 'tas', 'heat_flux']:
        fig = plt.figure()
        ax = plt.subplot(1, 1, 1)
        plt.title('{} vs {} - {}'.format(var1, var2, lb))

        all_stoc_2 = np.concatenate(
            [serie[(var2, lb, ens)] for ens in ensmems[3:]])
        all_base_2 = np.concatenate(
            [serie[(var2, lb, ens)] for ens in ensmems[:3]])
        # print(var1, var2, all_base_1.shape, all_base_2.shape)

        sc1 = ax.scatter(all_base_1, all_base_2, label='base', s=3)
        sc2 = ax.scatter(all_stoc_1, all_stoc_2, label='stoc', s=3)

        rb = ctl.Rcorr(all_base_1, all_base_2)
        rs = ctl.Rcorr(all_stoc_1, all_stoc_2)
        plt.text(0.1, 0.95, 'R = {:5.2f}'.format(rb), transform=ax.transAxes, color=sc1.get_facecolor()[0])
        plt.text(0.1, 0.9, 'R = {:5.2f}'.format(rs), transform=ax.transAxes, color=sc2.get_facecolor()[0])

        plt.xlabel(var1)
        plt.ylabel(var2)
        plt.legend(loc=1)
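# NOTE: with 1-D series and no latitude argument, ctl.Rcorr presumably reduces to the plain
# Pearson correlation, i.e. something like np.corrcoef(all_base_1, all_base_2)[0, 1].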
freq_seas = np.mean(freq_seas, axis=1)

years = np.array([da.year for da in yrdates])
yealen = np.arange(len(years))

# freq = np.array(ctl.running_mean(freq_ok, 15))
freq = np.array(ctl.running_mean(freq_seas, n_yr))
oks = ~np.isnan(freq)
freq = freq[oks]
# amvc = np.array(ctl.running_mean(amv_ref_djf, 15))

print(len(freq), len(years))
years = years[oks]
yealen = yealen[oks]

rco = ctl.Rcorr(amvc, freq)
ref_corrs[(indexname, reg, seas)] = rco

ax.set_title('Corr {}: {:5.2f}'.format(reg, rco))
ax.plot(yealen, freq, color='steelblue')
ax2 = ax.twinx()
ax2.plot(yealen, amvc, color='indianred')

ax.set_xticks(yealen[2::15])
ax.set_xticklabels(years[2::15])
ax.set_xlabel('Years')
ax.set_ylabel('WR frequency')

fig.suptitle('Correlation of WR frequency with {} index'.format(indexname))
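# NOTE: ctl.running_mean above returns a smoothed series that is NaN-padded at the edges (hence
# the ~np.isnan filtering). A rough stand-alone equivalent, assuming a centered window of n_yr
# points (hypothetical helper; np is already imported in this script):
def _running_mean_nan(x, window):
    """Centered running mean; edge points without a full window are left as NaN."""
    x = np.asarray(x, dtype=float)
    out = np.full(len(x), np.nan)
    smooth = np.convolve(x, np.ones(window) / window, mode='valid')
    start = (len(x) - len(smooth)) // 2
    out[start:start + len(smooth)] = smooth
    return out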
ctl.plot_map_contour(map_full, lat, lon, visualization='Nstereo', plot_anomalies=False, filename=cart_out_maps + 'map_full_{}_{}.pdf'.format(mod, mem), cbar_range=(0., 0.1))

allmaps[(mod, mem, 'full')], lato, lono = ctl.sel_area(lat, lon, map_full, 'EAT')
allrms[(mod, mem, 'full')] = ctl.E_rms(allmaps[(mod, mem, 'full')], allmaps[('ERA', 'full')], lato)
allpatcor[(mod, mem, 'full')] = ctl.Rcorr(allmaps[(mod, mem, 'full')], allmaps[('ERA', 'full')], lato)

allma = []
for reg in range(4):
    okind = wri == reg
    okblo_map = np.mean(blok[okind, ...], axis=0) - map_full
    allma.append(okblo_map)

    allmaps[(mod, mem, reg)], lato, lono = ctl.sel_area(lat, lon, okblo_map, 'EAT')
    allrms[(mod, mem, reg)] = ctl.E_rms(allmaps[(mod, mem, reg)], allmaps[('ERA', reg)], lato)
    allpatcor[(mod, mem, reg)] = ctl.Rcorr(allmaps[(mod, mem, reg)], allmaps[('ERA', reg)], lato)
    allsums[(mod, mem, reg)] = np.sum(allmaps[(mod, mem, reg)])
for tip in ['tot LWA', 'trans LWA', 'Montg streamf']:
    # scatter/bar plot montg/trans
    fig = plt.figure(figsize=(16, 12))
    axes = []
    for num, patt in enumerate(patnames):
        ax = plt.subplot(2, 2, num + 1)

        obs = resu['ERA5'][tip][num]
        obs, lat_area, lon_area = ctl.sel_area(olat, olon, obs, areas[aaa])
        modpats = [ctl.sel_area(resu[mod]['lat'], resu[mod]['lon'], resu[mod][tip][num], areas[aaa])[0] for mod in mods_all]

        patcors = [ctl.Rcorr(obs, modpat, lat_area) for modpat in modpats]

        for pos, tra, col in zip(positions, patcors, colors):
            ax.bar(pos, tra, color=col, width=0.4)

        ax.set_xticks(posticks)
        ax.set_xticklabels([])
        if num in [2, 3]:
            ax.set_xticklabels(modshort, rotation=45.)
        ax.set_title(patt, fontsize=16)
        axes.append(ax)
        ax.grid(axis='y')

        # if num in [2, 3]: ax.set_xlabel('regime streamf. pattern correlation')
        # if num in [0, 2]: ax.set_ylabel('{} pattern correlation'.format(tip))
        if tip == 'tot LWA':
            if num in [0, 2]:
    fig_score, axs = figscores[(reg, 0)]
    axs.set_xticks(xssdi[reg], minor=False)
    axs.set_xticklabels(metrnam[reg], ha='center', rotation=30)
    axs.legend()
    axs.set_ylabel(r'$R^2$')
    fig_score.savefig(cart_out + 'Rsquared_{}_v2_{}_{}.pdf'.format(reg, ensmod, katullo))

    fig_score, axs = figscores[(reg, 1)]
    axs.set_xticks(xssdi[reg], minor=False)
    axs.set_xticklabels(metrnam[reg], ha='center', rotation=30)
    axs.legend()
    axs.set_ylabel(r'Adjusted $R^2$')
    fig_score.savefig(cart_out + 'Adj_Rsquared_{}_v2_{}_{}.pdf'.format(reg, ensmod, katullo))

pickle.dump(tuttecose, open(cart_out + 'tuttecose_wcmip5.p', 'wb'))

dresss = dict()
for ke in drilis:
    dresss[ke[1]] = np.array([alldrivs_em[(ke[0], ke[1], mod)] for mod in modgen_all])

dreky = list(dresss.keys())
for ke in dresss:
    for ku in dreky[dreky.index(ke):]:
        if ke == ku:
            continue
        kenan = ~np.isnan(dresss[ke])
        kunan = ~np.isnan(dresss[ku])
        allna = kenan & kunan
        rco = ctl.Rcorr(dresss[ke][allna], dresss[ku][allna])
        if np.abs(rco) > 0.3:
            print(ke, ku, ' -> {:5.2f}'.format(rco))
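# NOTE: the pairwise scan above prints only driver pairs with |R| > 0.3. A sketch of an
# alternative (not part of the original script), assuming pandas is available: the full
# correlation matrix of the drivers in one call, with NaNs handled pairwise by DataFrame.corr():
import pandas as pd
driver_corr = pd.DataFrame(dresss).corr()
print(driver_corr.round(2))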
    modskill = []
    modskill_p50 = []
    obserie = np.array([ob[reg] for ob in obsfr])

    for mod in exps:
        modserie = []
        modserie_p50 = []
        for ye in allye:
            cose = [results[mod]['freq_clus_seasonal']['{:02d}_{}1101'.format(nu, ye)][reg] for nu in range(25)]
            modserie.append(np.mean(cose))
            modserie_p50.append(np.median(cose))

        modskill.append(ctl.Rcorr(obserie, modserie))
        modskill_p50.append(ctl.Rcorr(obserie, modserie_p50))

    xs = np.arange(len(modskill))
    ax.scatter(xs, modskill, c=colors, s=100)
    ax.scatter(xs, modskill_p50, c=colors, s=100, marker='x')

    ax.grid()
    ax.set_xticks([])
    ax.set_title(patnames[reg])
    axes.append(ax)

ctl.adjust_ax_scale(axes)
fig.suptitle('Seasonal skill')
ctl.custom_legend(fig, colors, exps)
sstmod_mean, sstmod_sd = ctl.seasonal_climatology(sstmod, datesmod, 'DJF', dates_range=ctl.range_years(1957, 2014))

# compare
for area in ['EAT', 'PNA']:
    sstmod_mean_area, latsel, lonsel = ctl.sel_area(lat, lon, sstmod_mean, area_box)

    okpomod = (sstmod_mean_area < -100) | (sstmod_mean_area > 500)
    oktot = (okpomod) | (okpoera)
    sstmod_x = np.ma.masked_array(sstmod_mean_area, mask=oktot)
    sstera_x = np.ma.masked_array(sstera_mean_area, mask=oktot)

    ctl.plot_triple_sidebyside(sstmod_x, sstera_x, latsel, lonsel, plot_type='pcolormesh', filename=cart_out_maps + 'map_EAT_{}_{}.pdf'.format(mod, mem), plot_margins=area_box, title='DJF SST bias - {} - {}'.format(mod, mem), stitle_1=mod, stitle_2='ERA', cb_label='SST bias (K)')

    rms = ctl.E_rms(sstmod_x, sstera_x, latitude=latsel, masked=True)
    patcor = ctl.Rcorr(sstmod_x, sstera_x, latitude=latsel, masked=True)
    print(rms, patcor)

    allrms[(mod, mem)] = rms
    allpatcor[(mod, mem)] = patcor

pickle.dump([allrms, allpatcor], open(cart_out + 'sst_bias_rms_djf_eat.p', 'wb'))

allrms, allpatcor = pickle.load(open(cart_out + 'sst_bias_rms_djf_eat.p', 'rb'))

fig = plt.figure(figsize=(16, 12))
ax = plt.subplot()
ax.set_ylabel('RMS SST bias (K)')
ax.set_title('RMS SST bias in North Atlantic')
ax.set_xticks([])

i = 0
wi = 0.6
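# NOTE: here ctl.E_rms is called with masked=True on np.ma arrays, which presumably means the
# masked (invalid SST) points are excluded from the area-weighted statistic. A minimal sketch of
# such a masked, cos(lat)-weighted RMS (hypothetical helper; np is already imported):
def _masked_weighted_rms(fa, fb, lats):
    """RMS difference of two masked (lat, lon) fields, cos(lat)-weighted, skipping masked points."""
    w = np.cos(np.deg2rad(lats))[:, np.newaxis] * np.ones(fa.shape)
    diff2 = (fa - fb)**2  # stays a masked array; the masks of fa and fb are combined
    return np.sqrt(np.ma.average(diff2, weights=w))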
plt.xlabel('central year of 30yr period')
plt.title('{} - {} {}'.format(area, sim, sm))
fig.savefig(cartfr + 'Freq_{}_{}_filt80.pdf'.format(area, sim))

#################################################################################################

cartcp = cart + 'corrpat/'

corrpat_ens = []
for ens in ensmem:
    corrpat = []
    for ran in yr_ranges:
        pat_EAT = results[(ens, 'EAT', ran)]['cluspattern'][0]
        pat_PNA = results[(ens, 'PNA', ran)]['cluspattern'][0]
        corrpat.append(ctl.Rcorr(pat_EAT, pat_PNA))
    corrpat_ens.append(np.array(corrpat))

fig = plt.figure()
for ens, cpa in zip(base_ens, corrpat_ens[:3]):
    plt.plot(cyea, cpa, label=ens)
    # plt.scatter(cyea, sig, color=col, marker=sym, label=ens)

plt.legend()
plt.grid()
plt.ylabel('Corr')
plt.xlabel('central year of 30yr period')
plt.title('base runs')
fig.savefig(cartcp + 'Corr_EATPNA_base.pdf')

fig = plt.figure()
for ens, cpa in zip(stoc_ens, corrpat_ens[3:]):
                patc, freqbias, PE_grad, Pole_NA_grad, NA_EQ_grad):
    ctl.printsep(resu)
    ctl.printsep(resu)
    resu.write('\n\n' + mod + '\n')

    cose = dict()
    # cose['namcorr'] = nam
    cose['deltaT'] = delt
    cose['AA'] = aaa
    cose['ANAT'] = ana
    cose['PE_grad'] = peg
    cose['Pole_NA_grad'] = png
    cose['NA_EQ_grad'] = neg
    cose['var_ratio'] = vrat
    cose['cen_rcorr'] = np.mean([ctl.Rcorr(ce1, ce2) for ce1, ce2 in zip(cen_re, cen)])
    cose['patcor'] = pa
    cose['freq_bias'] = fb

    # Frequency
    for reg in range(4):
        resu.write('Regime {} frequency (50 and 20-yr period minus 50-yr reference)\n'.format(reg))

        allres50 = [freqs[(sim, mod, 'tot50')][reg] for sim in allsims]
        allres50 = [allres50[0]] + list(np.array(allres50[1:]) - allres50[0])
        resu.write(stringa.format(*allres50))

        allres20 = [freqs[(sim, mod, 'last20')][reg] for sim in allsims]
latsss = np.arange(30., 88, 2.5)

i = 0
for cos, cosbia in zip([mean_field_all, lowfrvar, highfrvar, stat_eddy_all], [mf_bias, lf_bias, hf_bias, se_bias]):
    i += 1
    for mod in model_names:
        rmsall = []
        for ke in cos.keys():
            if mod in ke:
                rmsall.append(cos[ke])

        allrms = np.array([ctl.E_rms(gigi, cos['ERA'], latitude=latsss) for gigi in rmsall])
        allpatcor = np.array([ctl.Rcorr(gigi, cos['ERA'], latitude=latsss) for gigi in rmsall])

        cos[mod] = np.mean(rmsall, axis=0)
        rmsmed = ctl.E_rms(cos[mod], cos['ERA'], latitude=latsss)
        patmed = ctl.Rcorr(cos[mod], cos['ERA'], latitude=latsss)

        # cosbia.append(allrms.mean())
        cosbia.append(allpatcor.mean())

        # if abs(rmsmed - allrms.mean())/rmsmed > 0.1:
        #     print(i, mod, rmsmed, np.mean(allrms), np.min(allrms), np.max(allrms))
        #     print(i, mod, patmed, np.mean(allpatcor), np.min(allpatcor), np.max(allpatcor))

# atm_resolution = np.array([245, 94, 97, 23, 250, 44, 100, 46, 48, 50, 27, 103, 52, 255, 106, 54, 56])
# oce_resolution = np.array([50, 29, 21, 22, 96, 23, 98, 24, 102, 25, 26, 38, 42, 104, 27, 28, 8])
atm_resolution = np.array([250, 100, 100, 25, 250, 50, 100, 50, 50, 50, 25, 100, 50, 250, 100, 50, 50