def compute(ifile):
    ## PRECOMPUTE
    print(ifile)
    print('Running precompute\n')
    var, level, lat, lon, dates, time_units, var_units, time_cal = ctl.read4Dncfield(ifile, extract_level=50000.)

    var_season, dates_season = ctl.sel_season(var, dates, season)
    climate_mean, dates_climat, climat_std = ctl.daily_climatology(var_season, dates_season, wnd)
    var_anom = ctl.anomalies_daily(var_season, dates_season, climate_mean=climate_mean, dates_climate_mean=dates_climat)

    var_area, lat_area, lon_area = ctl.sel_area(lat, lon, var_anom, area)
    print(var_area.shape)

    print('Running compute\n')
    #### EOF COMPUTATION
    eof_solver = ctl.eof_computation(var_area, lat_area)
    PCs = eof_solver.pcs()[:, :numpcs]

    print('Running clustering\n')
    #### CLUSTERING
    centroids, labels = ctl.Kmeans_clustering(PCs, numclus, algorithm='molteni')
    cluspattern = ctl.compute_clusterpatterns(var_anom, labels)

    cluspatt_area = []
    for clu in cluspattern:
        cluarea, _, _ = ctl.sel_area(lat, lon, clu, area)
        cluspatt_area.append(cluarea)
    cluspatt_area = np.stack(cluspatt_area)

    varopt = ctl.calc_varopt_molt(PCs, centroids, labels)
    print('varopt: {:8.4f}\n'.format(varopt))
    freq_clus = ctl.calc_clus_freq(labels)

    print('Running clus sig\n')
    significance = ctl.clusters_sig(PCs, centroids, labels, dates_season, nrsamp=5000)
    # significance_2 = ctl.clusters_sig(PCs, centroids, labels, dates_season, nrsamp=5000)
    # print('Significances: {:7.3f} vs {:7.3f}\n'.format(significance, significance_2))

    return lat, lon, var_anom, eof_solver, centroids, labels, cluspattern, cluspatt_area, freq_clus, significance
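# Minimal usage sketch for compute(). Note that it also relies on module-level
# settings (season, wnd, area, numpcs, numclus) being defined before the call;
# the input path below is hypothetical.
# (lat, lon, var_anom, solver, centroids, labels,
#  cluspattern, cluspatt_area, freq_clus, significance) = compute('/path/to/zg500_daily.nc')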
def dothing(wind, coords, lanc=lanc20):
    area = [-60., 0., 20., 70.]
    lat = coords['lat']
    lon = coords['lon']

    wind_area, latsel, lonsel = ctl.sel_area(lat, lon, wind, area)

    # Low-pass filter: convolve each grid-point time series with the Lanczos
    # weights passed via `lanc` (was hard-coded to lanc20, ignoring the argument)
    wind_low = np.zeros(wind_area.shape)
    for ila, la in enumerate(latsel):
        for ilo, lo in enumerate(lonsel):
            wind_low[:, ila, ilo] = np.convolve(lanc, wind_area[:, ila, ilo], mode='same')
    #wind_low = ctl.running_mean(wind_area, 10)

    wind_low_djf, dates = ctl.sel_season(wind_low, coords['dates'], 'DJF')

    return wind_low_djf, dates
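# A vectorized alternative to the per-gridpoint loop in dothing(), as a sketch:
# scipy.ndimage.convolve1d applies the same Lanczos weights along the time axis
# for all grid points at once (mode='constant' with zero padding mimics
# np.convolve(..., mode='same') for an odd-length kernel).
from scipy.ndimage import convolve1d

def dothing_vectorized(wind, coords, lanc=lanc20):
    area = [-60., 0., 20., 70.]
    wind_area, latsel, lonsel = ctl.sel_area(coords['lat'], coords['lon'], wind, area)
    # convolve along axis 0 (time) in one call
    wind_low = convolve1d(wind_area, lanc, axis=0, mode='constant')
    return ctl.sel_season(wind_low, coords['dates'], 'DJF')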
    ax.set_title(reg_names[reg])

ctl.adjust_ax_scale(axes)
ctl.custom_legend(fig, coloks, modoks, ncol=4)
fig.suptitle('Change of avg. regime residence time in 2050-2100 wrt 1964-2014')
fig.savefig(cart_out + 'WR_av_restime_dtr_{}_{}_{}_{}.pdf'.format(*ke))

# regime frequency change for subseasons: ND, DJF, FM
allseas = ['ND', 'DJF', 'FM']
for sea in allseas:
    for mod in modoks:
        lab_seas, dates_seas = ctl.sel_season(results_ssp585[mod]['labels'], results_ssp585[mod]['dates'], sea)
        results_ssp585[mod]['freq_clus_' + sea] = ctl.calc_clus_freq(lab_seas, numclus)

        lab_seas, dates_seas = ctl.sel_season(results_hist[mod]['labels'], results_hist[mod]['dates'], sea)
        results_hist[mod]['freq_clus_' + sea] = ctl.calc_clus_freq(lab_seas, numclus)

    fig = plt.figure(figsize=(16, 12))
    axes = []
    for reg in range(4):
        ax = fig.add_subplot(2, 2, reg + 1)
        axes.append(ax)
figure_file_cross = cart_out + 'hfc_crosssections_ERAref.pdf'
figures_cross = []

fluxes = dict()
fluxnames = ['mshf', 'mpef', 'mlhf']
fluxlongnames = ['Sensible Heat', 'Potential Energy', 'Latent Heat']
factors = [cp/g, 1., L/g]

vars = dict()
varnames = ['hus', 'ta', 'va', 'zg']
fils = ['lcs0_day_1988_{}.nc'.format(varna) for varna in varnames]
for varna, fi in zip(varnames, fils):
    var, level, lat, lon, dates, time_units, var_units, time_cal = ctl.read4Dncfield(cart_in + fi)
    var, okda = ctl.sel_season(var, dates, 'Feb')
    vars[varna] = var

press0, latdad, londsad, datespress, time_units, var_units = ctl.read3Dncfield(cart_in + 'lcs0_day_1988_ps.nc')
press0, _ = ctl.sel_season(press0, datespress, 'Feb')
press0 = np.mean(press0, axis=0)

mshf = factors[0] * vars['va'] * vars['ta']
mpef = vars['va'] * vars['zg']
mlhf = factors[2] * vars['va'] * vars['hus']

mshf = np.mean(mshf, axis=0)
mpef = np.mean(mpef, axis=0)
mlhf = np.mean(mlhf, axis=0)

mshfint = np.zeros(mshf.shape[1:])
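# What presumably follows is the mass-weighted vertical integral of the
# time-mean fluxes into mshfint. A minimal sketch under these assumptions:
# `level` is in Pa, increasing monotonically, in the same units as press0,
# and the 1/g mass weighting already sits inside `factors`. Levels below the
# local surface pressure are excluded from the column integral.
for ila in range(mshfint.shape[0]):
    for ilo in range(mshfint.shape[1]):
        oklev = level <= press0[ila, ilo]
        mshfint[ila, ilo] = np.trapz(mshf[oklev, ila, ilo], x=level[oklev])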
cart_out = '/home/fabiano/Research/lavori/WeatherRegimes/taspr_composites_ERA/'
if not os.path.exists(cart_out):
    os.mkdir(cart_out)

#file_in = '/data-hobbes/fabiano/OBS/ERA/ERA40+Int_daily_1957-2018_zg500_remap25_meters.nc'
file_in = '/data-hobbes/fabiano/OBS/ERA/ERAInterim/zg500/ERAInt_daily_1979-2018_129_zg500_remap25_meters.nc'

print(ctl.datestamp())

# lat = np.arange(-90, 91, 2.5)
# lon = np.arange(0., 360, 2.5)

var, coords, aux_info = ctl.read_iris_nc(file_in, extract_level_hPa=500)
lat = coords['lat']
lon = coords['lon']
dates = coords['dates']

var_season, dates_season = ctl.sel_season(var, dates, season)
all_years = np.arange(dates[0].year, dates[-1].year + 1)

kwar = dict()
kwar['numclus'] = 4
kwar['run_significance_calc'] = False
kwar['numpcs'] = 4
kwar['detrended_eof_calculation'] = False
kwar['detrended_anom_for_clustering'] = False
kwar['nrsamp_sig'] = 500

results_ref = cd.WRtool_core(var_season, lat, lon, dates_season, area, heavy_output=True, **kwar)
vars[list(var2.keys())[0]] = L * var2[list(var2.keys())[0]]

era_zonal_factor = 2 * np.pi * Rearth * np.cos(np.deg2rad(lat))

era_fluxes_maps = dict()
era_fluxes_zonal = dict()
seasons = ['Feb', 'DJF', 'JJA']
fluxnames = ['tot', 'SH', 'PE', 'LH']
eraname = {'tot': 'p76.162', 'SH': 'p70.162', 'PE': 'p74.162', 'LH': 'p72.162'}

for flun in fluxnames:
    for seas in seasons:
        era_fluxes_maps[(flun, seas)] = np.mean(ctl.sel_season(vars[eraname[flun]], dates, seas, cut=False)[0], axis=0)
    era_fluxes_maps[(flun, 'year')] = np.mean(vars[eraname[flun]], axis=0)

for fu in era_fluxes_maps:
    era_fluxes_zonal[fu] = np.mean(era_fluxes_maps[fu], axis=1) * era_zonal_factor

######
print('Is it the first level???\n')
tag = 'ERAwith1000'
file_list = cart_in + 'all_vtgq_1988_6hrs.nc'

factors = {'SH': cp / g, 'PE': 1., 'LH': L / g}
#factors['PE'] = 1./g
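# Quick sanity check on the ERA zonal integrals computed above (assumption:
# the ERA vertically integrated fluxes are in W m^-1, so multiplying the zonal
# mean by 2*pi*R*cos(lat) gives W). The peak annual-mean total meridional
# energy transport should come out at a few PW.
print('Peak total transport: {:6.2f} PW'.format(np.max(era_fluxes_zonal[('tot', 'year')]) / 1.e15))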
file_in = '/data-hobbes/fabiano/OBS/ERA/ERA40+Int_daily_1957-2018_zg500_remap25_meters.nc'

var, coords, aux_info = ctl.read_iris_nc(file_in, extract_level_hPa=500)
lat = coords['lat']
lon = coords['lon']
dates = coords['dates']

var, dates = ctl.sel_time_range(var, dates, ctl.range_years(1979, 2014))
mean_field, _ = ctl.seasonal_climatology(var, dates, season)

var_anoms = ctl.anomalies_daily_detrended(var, dates)

# LOW FREQ VARIABILITY
#var_low = ctl.running_mean(var_anoms, 5)
var_low = ctl.lowpass_lanczos(var_anoms, 6)
var_low_DJF, dates_DJF = ctl.sel_season(var_low, dates, season)
lowfr_variab = np.std(var_low_DJF, axis=0)
lowfr_variab_zonal = ctl.zonal_mean(lowfr_variab)

# High freq
var_high = var_anoms - var_low
var_high_DJF, dates_DJF = ctl.sel_season(var_high, dates, season)
highfr_variab = np.std(var_high_DJF, axis=0)
highfr_variab_zonal = ctl.zonal_mean(highfr_variab)

# Stationary eddy: time-mean field minus its zonal mean at each latitude
zonal_mean = ctl.zonal_mean(mean_field)
stat_eddy = np.empty_like(mean_field)
for i in range(stat_eddy.shape[0]):
    stat_eddy[i] = mean_field[i] - zonal_mean[i]
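# Equivalent broadcasting one-liner for the stationary-eddy loop above, as a
# sketch (assuming mean_field is 2-D, lat x lon, and zonal_mean is 1-D in lat):
# stat_eddy = mean_field - zonal_mean[:, np.newaxis]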
zonal_margins['tot'] = (-3.5e16, 4.5e16)
zonal_margins['mshf'] = (-3.5e16, 4.5e16)
zonal_margins['mpef'] = (-3.5e16, 4.5e16)
zonal_margins['mlhf'] = (-7e15, 9.e15)

map_margins = dict()
map_margins['mshf'] = (-1.5e10, 1.5e10)
map_margins['mpef'] = (-4.e9, 4.e9)
map_margins['mlhf'] = (-7.e8, 7.e8)
map_margins['tot'] = (-2.e10, 2.e10)

# Loading reference pressure file
pressurefile = '/data-hobbes/fabiano/SPHINX/heat_flux/1988_daily/lcs0_day_1988_ps.nc'
press0row, lat, lon, datespress, time_units, var_units = ctl.read3Dncfield(pressurefile)

press0 = dict()
press0['DJF'] = np.mean(ctl.sel_season(press0row, datespress, 'DJF', cut=False)[0], axis=0)
press0['JJA'] = np.mean(ctl.sel_season(press0row, datespress, 'JJA', cut=False)[0], axis=0)
press0['year'] = np.mean(press0row, axis=0)

# Loading ERA reference
cart_era = '/data-hobbes/fabiano/OBS/ERA/ERAInterim/'
era_fi = 'prova_heatflux_1988_MM.nc'
cpc = nc.Dataset(cart_era + era_fi)
era_lat = cpc.variables['latitude'][:]
era_lon = cpc.variables['longitude'][:]
era_zonal_factor = 2 * np.pi * Rearth * np.cos(np.deg2rad(era_lat))

era_fluxes_maps = dict()
kwar['numclus'] = 4
kwar['run_significance_calc'] = False
kwar['numpcs'] = 4
kwar['detrended_eof_calculation'] = False  # detrending is done upfront instead
kwar['detrended_anom_for_clustering'] = False
kwar['nrsamp_sig'] = 500

var, coords, aux_info = ctl.read_iris_nc(file_in, extract_level_hPa=500)
lat = coords['lat']
lon = coords['lon']
dates = coords['dates']

var, dates = ctl.sel_time_range(var, dates, ctl.range_years(1957, 2014))
var_anoms = ctl.anomalies_daily_detrended(var, dates)

var_season, dates_season = ctl.sel_season(var_anoms, dates, season)
all_years = np.arange(dates[0].year, dates[-1].year + 1)

results_ref = cd.WRtool_core(var_season, lat, lon, dates_season, area, heavy_output=True, **kwar)
kwar['ref_solver'] = results_ref['solver']
kwar['ref_patterns_area'] = results_ref['cluspattern_area']

all_results = dict()
for i in range(100):
    print(i)
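    # The body of this 100-iteration loop is not shown here. A purely
    # hypothetical sketch of what one bootstrap draw over years could look
    # like (resample whole years with replacement, rerun the core on the
    # resampled days):
    # yrs = np.random.choice(all_years, size=len(all_years), replace=True)
    # okdays = np.concatenate([np.where([da.year == yr for da in dates_season])[0] for yr in yrs])
    # all_results[i] = cd.WRtool_core(var_season[okdays], lat, lon, dates_season[okdays], area, **kwar)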
zg_zon = np.mean(zg, axis=-1)
zg_eddy = zg - zg_zon[..., np.newaxis]

ua_climate_mean, dates_climate_mean, _ = ctl.daily_climatology(ua, coords['dates'], window=20)
zg_climate_mean, dates_climate_mean, _ = ctl.daily_climatology(zg, coords['dates'], window=20)
zg_climate_mean_eddy, dates_climate_mean, _ = ctl.daily_climatology(zg_eddy, coords['dates'], window=20)
zg_climate_mean_zon, dates_climate_mean, _ = ctl.daily_climatology(zg_zon, coords['dates'], window=20)

ua_low = ctl.butter_filter(ua, 10)
ua_low_djfm, dates_djfm = ctl.sel_season(ua_low, coords['dates'], 'DJFM')

zg_djfm, dates_djfm = ctl.sel_season(zg, coords['dates'], 'DJFM')
zg_eddy_djfm, dates_djfm = ctl.sel_season(zg_eddy, coords['dates'], 'DJFM')

# zg_low = ctl.butter_filter(zg, 10)
# zg_low_djfm, dates_djfm = ctl.sel_season(zg_low, coords['dates'], 'DJFM')
#
# zg_low_eddy = ctl.butter_filter(zg_eddy, 10)
# zg_low_eddy_djfm, dates_djfm = ctl.sel_season(zg_low_eddy, coords['dates'], 'DJFM')

### Test jli.
figs = []
# jli, jspeed, jdates = cd.jetlatindex(ua, coords['lat'], coords['lon'], coords['dates'], filter = 'butter')
# figs.append(cd.plot_jli_w_speed(jli, jspeed, jdates, title = 'butterworth w 0.22'))
# jli, jspeed, jdates = cd.jetlatindex(ua, coords['lat'], coords['lon'], coords['dates'], filter = 'butter')
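# Aside: a sketch of how such a 10-day low-pass could be built directly with
# scipy (ctl.butter_filter's actual order/cutoff may differ; the 'butterworth
# w 0.22' title above suggests a normalized cutoff near 0.2, which matches a
# 10-day period on daily data: (1/10 cpd) / (0.5 cpd Nyquist) = 0.2).
# from scipy.signal import butter, filtfilt
# b, a = butter(5, 0.2, btype='low')    # order 5 is an assumption
# ua_low = filtfilt(b, a, ua, axis=0)   # zero-phase filtering along time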
cart_out = '/home/fabiano/Research/lavori/WeatherRegimes/taspr_composites_ERA/'
if not os.path.exists(cart_out):
    os.mkdir(cart_out)

file_in = '/data-hobbes/fabiano/OBS/ERA/ERAInterim/zg500/ERAInt_daily_1979-2018_129_zg500_remap25_meters.nc'

print(ctl.datestamp())

# lat = np.arange(-90, 91, 2.5)
# lon = np.arange(0., 360, 2.5)

var, coords, aux_info = ctl.read_iris_nc(file_in, extract_level_hPa=500)
lat = coords['lat']
lon = coords['lon']
dates = coords['dates']
print(dates[0], dates[-1], var.shape)

var_season, dates_season = ctl.sel_season(var, dates, season)
all_years = np.arange(dates[0].year, dates[-1].year + 1)

kwar = dict()
kwar['numclus'] = 4
kwar['run_significance_calc'] = False
kwar['numpcs'] = 4
kwar['detrended_eof_calculation'] = False
kwar['detrended_anom_for_clustering'] = False
kwar['nrsamp_sig'] = 500

results_ref = cd.WRtool_core(var_season, lat, lon, dates_season, area, heavy_output=True, **kwar)

sys.exit()

kwar['ref_solver'] = results_ref['solver']
#######################################
cart_in = '/data-hobbes/fabiano/SPHINX/zg_daily/'
cart_out = '/home/fabiano/Research/lavori/SPHINX_for_lisboa/WRtool/test_trend_traj_lcb0/'
if not os.path.exists(cart_out):
    os.mkdir(cart_out)

fil = cart_in + 'lcb0-1850-2100-NDJFM_zg500_NH_14473.nc'
var, level, lat, lon, dates, time_units, var_units, time_cal = ctl.read4Dncfield(fil, extract_level=50000)

#climate_mean, dates_climate_mean = ctl.trend_daily_climat(var, dates)
#var_anom = ctl.anomalies_daily_detrended(var, dates, climate_mean = climate_mean, dates_climate_mean = dates_climate_mean)

var_season, dates_season = ctl.sel_season(var, dates, 'DJF')

erafile = '/data-hobbes/fabiano/OBS/ERA/ERAInterim/zg500/zg500_Aday_ERAInterim_2deg_1979-2014.nc'
ERA_ref_EAT = cd.WRtool_from_file(erafile, 'DJF', 'EAT', extract_level_4D=50000., numclus=4, heavy_output=True, run_significance_calc=False)

area = 'EAT'
ref_solver = ERA_ref_EAT['solver']
ref_patterns_area = ERA_ref_EAT['cluspattern_area']

#resu_nodet = cd.WRtool_core(var_season, lat, lon, dates_season, area, run_significance_calc = False, ref_solver = ref_solver, ref_patterns_area = ref_patterns_area, detrended_eof_calculation = False, detrended_anom_for_clustering = False, heavy_output = True)
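# Presumably the detrended counterpart follows; a sketch mirroring the
# commented call above, with the two detrending switches flipped on:
# resu_det = cd.WRtool_core(var_season, lat, lon, dates_season, area, run_significance_calc=False, ref_solver=ref_solver, ref_patterns_area=ref_patterns_area, detrended_eof_calculation=True, detrended_anom_for_clustering=True, heavy_output=True)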
var2, lat, lon, dates, time_units, var_units, time_cal = ctl.readxDncfield(cart_in_ref + fi)
print(var2.keys())
vars[list(var2.keys())[0]] = L * var2[list(var2.keys())[0]]

era_zonal_factor = 2 * np.pi * Rearth * np.cos(np.deg2rad(lat))

era_fluxes_maps = dict()
era_fluxes_zonal = dict()
seasons = ['Feb', 'DJF', 'JJA']
fluxnames = ['tot', 'SH', 'PE', 'LH']
eraname = {'tot': 'p76.162', 'SH': 'p70.162', 'PE': 'p74.162', 'LH': 'p72.162'}

for flun in fluxnames:
    for seas in seasons:
        era_fluxes_maps[(flun, seas)] = np.mean(ctl.sel_season(vars[eraname[flun]], dates, seas, cut=False)[0], axis=0)
    era_fluxes_maps[(flun, 'year')] = np.mean(vars[eraname[flun]], axis=0)

for fu in era_fluxes_maps:
    era_fluxes_zonal[fu] = np.mean(era_fluxes_maps[fu], axis=1) * era_zonal_factor

###################################################################################
ann = np.arange(1950, 2101, 10)
annme = [(a1 + a2) // 2 for a1, a2 in zip(ann[:-1], ann[1:])]
print(annme)

cart_out_results = cart_out + 'out_flux_calc_NEW/'
if not os.path.exists(cart_out_results):
    os.mkdir(cart_out_results)

seasons = ['DJF', 'JJA']
if os.path.exists(filos):
    cross3d = pickle.load(open(filos, 'rb'))
else:
    cross3d = dict()
    for ens in ensmem:
        for seas in seasons + ['year']:
            cross3d[(ens, seas)] = []

        for yea in years:
            filena = '{}_mon_{}_{}.nc'.format(ens, yea, varna)
            var, level, lat, lon, dates, time_units, var_units, time_cal = ctl.read4Dncfield(cart_in_3d + filena)
            cli, datescli, _ = ctl.monthly_climatology(var, dates)

            for seas in seasons:
                coso = np.mean(ctl.sel_season(cli, datescli, seas, cut=False)[0], axis=0)
                cross3d[(ens, seas)].append(np.mean(coso, axis=-1))

            coso = np.mean(cli, axis=0)
            cross3d[(ens, 'year')].append(np.mean(coso, axis=-1))

        for seas in seasons + ['year']:
            cross3d[(ens, seas)] = np.stack(cross3d[(ens, seas)])

    for seas in seasons + ['year']:
        cross3d[('base', seas)] = np.mean([cross3d[(ens, seas)] for ens in ensmem[:3]], axis=0)
        cross3d[('stoc', seas)] = np.mean([cross3d[(ens, seas)] for ens in ensmem[3:]], axis=0)
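    # Hypothetical completion of the caching pattern above (assumption: the
    # original script dumps the freshly computed dict back to `filos` so the
    # pickle.load branch works on the next run):
    with open(filos, 'wb') as f:
        pickle.dump(cross3d, f)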