# Precipitation composite for the current regime: average the anomaly maps
# over all days assigned to regime `reg`.
labok = oklabels == reg
cosa = np.mean(prec_anoms[labok, ...], axis=0)
compos['prec'].append(cosa)
fig = ctl.plot_map_contour(cosa, coords_prec['lat'], coords_prec['lon'], title='Prec anom - regime {}'.format(reg), plot_margins=(-120, 120, 20, 90), cbar_range=(-5, 5), cmap='RdBu')
allfigs.append(fig)

# Calculate and visualize composites. FILTER ON EVENT DURATION > 5 days
# Residence times of the reference labels; rsd_num maps each day to its event.
rsd_tim, rsd_dat, rsd_num = ctl.calc_regime_residtimes(
    results_ref['labels'], dates=results_ref['dates'])
# length_event gives, for every day, the duration of the event it belongs to.
days_event, length_event = ctl.calc_days_event(results_ref['labels'], rsd_tim, rsd_num)
# Restrict event lengths to the period covered by the precipitation data.
okleneve, _ = ctl.sel_time_range(length_event, results_ref['dates'],
                                 dates_range=(dateprec[0], dateprec[-1]))

compos['temp_filt'] = []
compos['prec_filt'] = []
for reg in range(kwar['numclus']):
    # Keep only days inside regime events that persisted for more than 5 days.
    # NOTE(review): length_event is indexed like results_ref['labels'] here,
    # not like okleneve — confirm temp_anoms covers the same full period.
    labok = (results_ref['labels'] == reg) & (length_event > 5)
    cosa = np.mean(temp_anoms[labok, ...], axis=0)
    compos['temp_filt'].append(cosa)
    fig = ctl.plot_map_contour(
        cosa,
# Historical-run statistics for model `mod`: per-regime residence times pooled
# over all members, number of events, last-20-years stats and refEOF patterns.
for reg in range(numclus):
    # Pool residence times of regime `reg` across all members.
    alltimes = np.concatenate([results_hist[mem]['resid_times'][reg] for mem in allmems])
    residtimes[('hist', mod, 'mean', reg)] = np.mean(alltimes)
    residtimes[('hist', mod, 'p90', reg)] = np.percentile(alltimes, 90)
    # FIX: the original indexed freqs/residtimes with the loop-external `ssp`
    # key while storing under 'hist'; all inputs of this block are 'hist'
    # quantities (the mean was assigned two lines above), so use 'hist' keys.
    num_event[('hist', mod, reg)] = freqs[('hist', mod, 'tot50')][reg] / residtimes[('hist', mod, 'mean', reg)]

# Same statistics restricted to the last 20 years (winters 1995/96 - 2013/14).
# NOTE(review): 'MM-DD-YYYY' strings rely on pandas' default (US-style)
# parsing — '09-01-1995' is 1 Sep 1995; confirm this is intended.
dat1 = pd.Timestamp('09-01-1995').to_pydatetime()  # invariant: hoisted out of the member loop
dat2 = pd.Timestamp('04-01-2014').to_pydatetime()

alllabs_20 = []
alltimes_20 = []
for mem in allmems:
    labs, dats = ctl.sel_time_range(results_hist[mem]['labels'], results_hist[mem]['dates'], (dat1, dat2))
    alllabs_20.append(labs)
    print(mem, len(labs))
    restim, _, _ = ctl.calc_regime_residtimes(labs, dats)
    alltimes_20.append(restim)

alllabs_20 = np.concatenate(alllabs_20)
# Pool the per-member residence times regime by regime.
alltimes_20 = [np.concatenate([cos[reg] for cos in alltimes_20]) for reg in range(numclus)]

for reg in range(numclus):
    residtimes[('hist', mod, 'mean_last20', reg)] = np.mean(alltimes_20[reg])
    residtimes[('hist', mod, 'p90_last20', reg)] = np.percentile(alltimes_20[reg], 90)

freqs[('hist', mod, 'last20')] = ctl.calc_clus_freq(alllabs_20, numclus)

# Ensemble-mean regime patterns in the reference-EOF space: centroid, pattern
# correlation with the reference, and centroid distance from the reference.
for reg in range(numclus):
    patterns_refEOF[('centroids', mod, reg)] = np.mean([res_hist_refEOF[mem]['centroids'][reg] for mem in allmems], axis=0)
    patterns_refEOF[('patcor', mod, reg)] = np.mean([res_hist_refEOF[mem]['patcor'][reg] for mem in allmems])
    patterns_refEOF[('centdist', mod, reg)] = np.mean([ctl.distance(res_hist_refEOF[mem]['centroids'][reg], results_ref['centroids'][reg]) for mem in allmems])
# Per-bootstrap diagnostics: optimal-variance ratio, lag-1 autocorrelation,
# regime frequencies, centroid distances to the reference and residence times.
varopt = ctl.calc_varopt_molt(pcs, centroids, labels)
autocorr = ctl.calc_autocorr_wlag(pcs, dates, out_lag1=True)
bootstraps['significance'].append(sig)
bootstraps['varopt'].append(varopt)
bootstraps['autocorr'].append(autocorr)
bootstraps['freq'].append(ctl.calc_clus_freq(labels, 4))

# Distance of each bootstrap centroid from the corresponding reference centroid.
centdist = np.array([
    ctl.distance(centroids[iclu], ref_cen[iclu]) for iclu in range(4)
])
bootstraps['dist_cen'].append(centdist)
bootstraps['centroids'].append(centroids)

# Mean and 90th-percentile residence time per regime for this bootstrap.
resid_times = ctl.calc_regime_residtimes(labels, dates=dates)[0]
av_res = np.array([np.mean(resid_times[reg]) for reg in range(4)])
av_res_90 = np.array(
    [np.percentile(resid_times[reg], 90) for reg in range(4)])
bootstraps['resid_times_av'].append(av_res)
bootstraps['resid_times_90'].append(av_res_90)
bootstraps['trans_matrix'].append(
    ctl.calc_regime_transmatrix(1, labels, dates))

# relative entropy, RMS, patcor
relent_all = []
for reg in range(4):
    # PCs of the days assigned to regime `reg` in this bootstrap sample.
    okclu = labels == reg
    okpc = pcs[okclu, :]
    for comp in range(4):
# Trend of the 10-year running-mean seasonal frequency for regime `reg`
# (NaNs from the running-mean edges are dropped before the regression).
seas10 = ctl.running_mean(seasfr[reg, :], yr10)
m, c, err_m, err_c = ctl.linear_regre_witherr(np.array(yr[~np.isnan(seas10)]), np.array(seas10[~np.isnan(seas10)]))
trend_ssp[(ssp, mem, 'trend', 'freq10', reg)] = m
trend_ssp[(ssp, mem, 'errtrend', 'freq10', reg)] = err_m

# Compute residence times in decadal windows and take their trend.
# (original comment: "devo fare ogni dieci anni e selezionare")
restimem = dict()
for reg in range(4):
    restimem[reg] = []

# NOTE(review): windows are 10 years long (Sep `ye` to Apr `ye`+10) but start
# every 5 years, so consecutive windows overlap — confirm this is intended.
for ye in np.arange(2015, 2091, 5):
    dat1 = pd.Timestamp('09-01-{}'.format(ye)).to_pydatetime()
    dat2 = pd.Timestamp('04-01-{}'.format(ye+10)).to_pydatetime()
    labs, dats = ctl.sel_time_range(results_ssp[ssp][mem]['labels'], results_ssp[ssp][mem]['dates'], (dat1, dat2))
    resti, _, _ = ctl.calc_regime_residtimes(labs, dats)
    for reg in range(4):
        # Mean residence time of this regime inside the current window.
        restimem[reg].append(np.mean(resti[reg]))

# Linear trend (with error) of the windowed mean residence times.
for reg in range(4):
    m, c, err_m, err_c = ctl.linear_regre_witherr(np.arange(2015, 2091, 5), np.array(restimem[reg]))
    residtime_ssp[(ssp, mem, 'trend', reg)] = m
    residtime_ssp[(ssp, mem, 'errtrend', reg)] = err_m

# Gather the per-member trends into arrays under the 'all' key for each ssp.
for ssp in allssps:
    for reg in range(4):
        trend_ssp[(ssp, 'all', 'trend', 'seafreq', reg)] = np.array([trend_ssp[(ssp, mem, 'trend', 'seafreq', reg)] for mem in okmods_ssp])
        trend_ssp[(ssp, 'all', 'errtrend', 'seafreq', reg)] = np.array([trend_ssp[(ssp, mem, 'errtrend', 'seafreq', reg)] for mem in okmods_ssp])
        trend_ssp[(ssp, 'all', 'trend', 'freq10', reg)] = np.array([trend_ssp[(ssp, mem, 'trend', 'freq10', reg)] for mem in okmods_ssp])
        trend_ssp[(ssp, 'all', 'errtrend', 'freq10', reg)] = np.array([trend_ssp[(ssp, mem, 'errtrend', 'freq10', reg)] for mem in okmods_ssp])
# Regime frequencies for base and stochastic runs, with and without the
# "filt80" filtering (days farther than a threshold from their centroid are
# re-labelled into an extra grey cluster), plus residence-time histograms.
stoc_dists = np.concatenate([results[('lcs{}'.format(i), 'EAT', ran)]['dist_centroid'] for i in range(3)])

freq[('base', area, ran)] = ctl.calc_clus_freq(base_labels)
freq[('stoc', area, ran)] = ctl.calc_clus_freq(stoc_labels)

# FIX: the original aliased `gigi = base_labels` and then assigned into it,
# mutating base_labels (and stoc_labels) in place — so the residence-time
# calculations below saw the injected grey label. Work on copies instead.
greylabs = base_dists > thres[area]
gigi = base_labels.copy()
gigi[greylabs] = np.max(base_labels) + 1  # extra cluster id for filtered days
freq[('base', area, ran, 'filt80')] = ctl.calc_clus_freq(gigi)

greylabs = stoc_dists > thres[area]
gigi = stoc_labels.copy()
gigi[greylabs] = np.max(stoc_labels) + 1
freq[('stoc', area, ran, 'filt80')] = ctl.calc_clus_freq(gigi)

# Residence times on the unfiltered label series.
rs_base, dates_init_b = ctl.calc_regime_residtimes(base_labels, dates=dates_long)
resid_times[('base', area, ran)] = rs_base
rs_stoc, dates_init_s = ctl.calc_regime_residtimes(stoc_labels, dates=dates_long)
resid_times[('stoc', area, ran)] = rs_stoc

cartper = cart + 'resid_times/'
for area in ['EAT', 'PNA']:
    binzzz = np.arange(0, 36, 5)
    for clu, clunam in enumerate(patnames[area]):
        ran = histran
        pts = patnames_short[area][clu]
        # Overlayed normalized histograms of base vs stoc residence times.
        fig = plt.figure()
        plt.title(clunam)
        n, bins, patches = plt.hist(resid_times[('base', area, ran)][clu], bins=binzzz, alpha=0.5, density=True, label='base')
        n2, bins2, patches2 = plt.hist(resid_times[('stoc', area, ran)][clu], bins=binzzz, alpha=0.5, density=True, label='stoc')
        plt.legend()
# Residence-time analysis of EC-Earth weather-regime indices: load the labels
# and the Z500 field, compute residence times and plot their histograms.
import climtools_lib as ctl
import pandas as pd
import numpy as np
from numpy import linalg as LA
from matplotlib import pyplot as plt
import pickle

cart = '/home/fabiano/Research/lavori/WP2_deliverable_Oct2018/Results_WP2/regime_indices/'
indxfi = 'regime_indices_ECE_LR.txt'
# One regime label per day, as a plain text column.
labels = np.loadtxt(cart + indxfi)

# Z500 field at 500 hPa (extract_level is in Pa); dates are used to split
# the label series into seasons/events.
var, level, lat, lon, dates, time_units, var_units, time_cal = ctl.read4Dncfield(
    '/data-hobbes/fabiano/PRIMAVERA_Stream1_Z500remap/zg500_Aday_EC-Earth3_T255_regrid25_1979-2014.nc',
    extract_level=50000.)

# Residence times with and without skipping single-day interruptions of an event.
resid_times, dates_init = ctl.calc_regime_residtimes(labels, dates=dates)
resid_times_noskip, dt = ctl.calc_regime_residtimes(labels, dates=dates, skip_singleday_pause=False)

plt.ion()
patnames = ['NAO +', 'Blocking', 'NAO -', 'Atl. Ridge']
patnames_short = ['NP', 'BL', 'NN', 'AR']
binzzz = np.arange(0, 37, 2)

# One histogram of residence times per regime.
for clu, clunam in zip(list(range(4)), patnames):
    pts = patnames_short[clu]
    fig = plt.figure()
    plt.title(clunam)
    n, bins, patches = plt.hist(resid_times[clu], bins=binzzz, alpha=0.5,