var, coords, aux_info = ctl.read_iris_nc(ref_file, extract_level_hPa=500)
lat = coords['lat']
lon = coords['lon']
dates = coords['dates']

var, dates = ctl.sel_time_range(var, dates, ctl.range_years(yearange[0], yearange[1]))
var_set, dates_set = ctl.seasonal_set(var, dates, 'DJF', seasonal_average=True)
years = np.array([da.year for da in dates_set])

############## PLOT GLOBAL TRENDS ######################
fig, ax = plt.subplots()

# global mean: anomalies w.r.t. the first year, plus the fitted linear trend
glob_mea = ctl.global_mean(var_set, lat)
g0 = glob_mea[0]
m, c = ctl.linear_regre(years, glob_mea)
ax.scatter(years, glob_mea - g0, label='Global', color='blue')
ax.plot(years, c + m * years - g0, color='blue')

# same for the Euro-Atlantic (EAT) sector
var_area, lat_area, lon_area = ctl.sel_area(lat, lon, var_set, 'EAT')
eat_mea = ctl.global_mean(var_area, lat_area)
g0 = eat_mea[0]
m, c = ctl.linear_regre(years, eat_mea)
ax.scatter(years, eat_mea - g0, label='EAT', color='green')
ax.plot(years, c + m * years - g0, color='green')

# same for the Northern Hemisphere (NH)
var_area, lat_area, lon_area = ctl.sel_area(lat, lon, var_set, 'NH')
nh_mea = ctl.global_mean(var_area, lat_area)
g0 = nh_mea[0]
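# For reference, a minimal sketch of what the two ctl helpers above are
# assumed to do (illustration only, not climtools' actual implementation):
# global_mean ~ cosine-latitude weighted spatial average; linear_regre ~
# ordinary least-squares slope and intercept.
import numpy as np

def global_mean_sketch(field, lat):
    # field: (..., nlat, nlon); weights proportional to cos(latitude)
    w = np.cos(np.deg2rad(lat))
    return (field.mean(axis=-1) * w).sum(axis=-1) / w.sum()

def linear_regre_sketch(x, y):
    # least-squares fit y ~ m*x + c, returned as (slope, intercept)
    m, c = np.polyfit(x, y, 1)
    return m, c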
glomeans_245, yeamean_245 = pickle.load(open(cart_out + 'yeamean_245.p', 'rb'))

yeamean_126 = dict()
glomeans_126 = dict()

ru = 'ssp126'
for var in ['tas', 'pr']:
    print(var)
    fils = glob.glob(filna.format(ru, var, var))

    kose = xr.open_mfdataset(fils, use_cftime=True)
    kose = kose.drop_vars('time_bnds')

    cosoye = kose[var].groupby("time.year").mean().compute()
    yeamean_126[(ru, var)] = cosoye
    glomeans_126[(ru, var)] = (cosoye.year.values, ctl.global_mean(cosoye))

pickle.dump([glomeans_126, yeamean_126], open(cart_out + 'yeamean_126.p', 'wb'))
# glomeans_126, yeamean_126 = pickle.load(open(cart_out + 'yeamean_126.p', 'rb'))

########################

glomeans, pimean, yeamean, mapmean = pickle.load(open(cart_in + 'bottino_seasmean_2D.p', 'rb'))

glomeans.update(glomeans_245)
yeamean.update(yeamean_245)
glomeans.update(glomeans_126)
yeamean.update(yeamean_126)
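# A self-contained toy example of the groupby("time.year") pattern used
# above: monthly values are reduced to one value per calendar year (the
# synthetic dataset here is illustrative only).
import numpy as np
import pandas as pd
import xarray as xr

time = pd.date_range('2000-01-01', periods=36, freq='MS')
da = xr.DataArray(np.arange(36.), coords={'time': time}, dims='time')
yearly = da.groupby('time.year').mean()   # dims: ('year',), 3 values
print(yearly.year.values)                 # [2000 2001 2002]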
for varnam in ['tas', 'pr', 'uas']:
    print(varnam)
    for ru, col in zip(allru, colors):
        print(ru)
        filist = glob.glob(filna.format(ru, ru[1:], miptab, varnam))
        gigi = xr.open_mfdataset(filist, use_cftime=True)

        var = np.array(gigi[varnam].data)
        lat = np.array(gigi.lat.data)
        lon = np.array(gigi.lon.data)
        dates = np.array(gigi.time.data)

        # global mean of the yearly-averaged field, over the full run
        varye, datye = ctl.yearly_average(var, dates)
        glomean = ctl.global_mean(varye, lat)
        resdict[(ru, varnam, 'glomean')] = glomean

        # climatologies over the last 200 years of the run
        ok200 = np.array([da.year > dates[-1].year - 200 for da in dates])
        varok = var[ok200]
        dateok = dates[ok200]

        resdict[(ru, varnam, 'mean200')], resdict[(ru, varnam, 'std200')] = ctl.seasonal_climatology(varok, dateok, 'year')
        resdict[(ru, varnam, 'mean200', 'DJFM')], resdict[(ru, varnam, 'std200', 'DJFM')] = ctl.seasonal_climatology(varok, dateok, 'DJFM')
        resdict[(ru, varnam, 'mean200',
# ctl.plot_multimap_contour(rad_flds['net_srf'], lats, lons, plot_anomalies=False, color_percentiles=(1, 99), title='SRF_NET', cmap='viridis', plot_type='pcolormesh')

# time mean of the net surface flux (first record excluded)
ok_coso = np.mean(rad_flds['net_srf'][1:], axis=0)
avfld[(expnam, 'net_srf')] = ok_coso

ctl.plot_map_contour(ok_coso, lats, lons, plot_anomalies=True, color_percentiles=(1, 99), title='SRF_NET', cmap='viridis', plot_type='pcolormesh', filename=cart + 'map_net_srf.pdf')
print('NET SRF', ctl.global_mean(ok_coso, lats))

## over land and ocean
ok_coso = avfld[(expnam, 'net_srf')]
ok_land = ctl.global_mean(ok_coso, lats, okmask)
ok_ocean = ctl.global_mean(ok_coso, lats, ~okmask)
print('NET SRF - LAND', ok_land)
print('NET SRF - OCEAN', ok_ocean)

for var in 'osrtotc olrtotc'.split():
    var_name, nlon, nlat, cose = ctl.read_globo_plotout(cart + var)
    rad_flds[var] = cose[init:] / 86400.  # rescale by the seconds in a day
    print(nlon, nlat)

rad_flds['net_toa'] = rad_flds['osrtotc'] + rad_flds['olrtotc']
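# A sketch of the masked area mean behind the land/ocean split above,
# assuming okmask is a boolean (nlat, nlon) land mask and that the third
# argument of ctl.global_mean restricts the average to True points (this
# mirrors, but is not, the climtools implementation):
import numpy as np

def masked_area_mean(field, lat, mask):
    # cos(lat) weights, broadcast over longitude, applied only where mask is True
    w = np.broadcast_to(np.cos(np.deg2rad(lat))[:, None], field.shape)
    return (field * w)[mask].sum() / w[mask].sum()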
from matplotlib.animation import ImageMagickFileWriter
import cartopy.crs as ccrs

# cart = '/home/fabiano/Research/lavori/SPHINX_for_lisboa/'
cart = '/home/fedefab/Scrivania/Research/Post-doc/SPHINX/'

ref_period = ctl.range_years(1850, 1900)

filena = 'lcb0-1850-2100-tas_mon.nc'
var, lat, lon, dates, time_units, var_units = ctl.read3Dncfield(cart + filena)
dates_pdh = pd.to_datetime(dates)

# Global stuff
global_mean = ctl.global_mean(var, lat)
zonal_mean = ctl.zonal_mean(var)

climat_mon, dates_mon, climat_std = ctl.monthly_climatology(var, dates, dates_range=ref_period)
climat_year = np.mean(climat_mon, axis=0)

yearly_anom, years = ctl.yearly_average(var, dates)
yearly_anom = yearly_anom - climat_year

zonal_anom = ctl.zonal_mean(yearly_anom)
global_anom = ctl.global_mean(yearly_anom, lat)

del var

# GIF animation
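# The climatology/anomaly step above, sketched in pure xarray for comparison
# (da is assumed to be the monthly tas DataArray with a 'time' coordinate;
# this is not the actual ctl.monthly_climatology implementation):
import xarray as xr

def monthly_anomaly(da, ref=('1850', '1900')):
    # per-month climatology over the reference period, subtracted month by month
    clim = da.sel(time=slice(*ref)).groupby('time.month').mean('time')
    return da.groupby('time.month') - clim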
cart_out_wcmip5 = cart_out + 'wcmip5/'
ctl.mkdir(cart_out_wcmip5)

corrmaps = dict()
for seas in ['NDJFM', 'year']:
    for area in ['EAT', 'PNA']:
        for reg in range(4):
            trendmat = tas_trends[('ssp585', okmods[0], seas)]
            corr_map = np.empty_like(trendmat)
            pval_map = np.empty_like(trendmat)

            nlat, nlon = trendmat.shape
            lat, lon = ctl.genlatlon(nlat, nlon)

            ssp = 'ssp585'
            gw_cmip6 = np.array([ctl.global_mean(tas_trends[(ssp, mod, seas)], lat) for mod in okmods])
            frok_cmip6 = np.array([cose[(ssp, area, mod, 'trend', reg)] for mod in okmods])

            ssp = 'rcp85_cmip5'
            gw_cmip5 = np.array([ctl.global_mean(tas_trends[(ssp, mod, seas)], lat) for mod in okmods_cmip5])
            frok_cmip5 = np.array([cose[(ssp, area, mod, 'trend', reg)] for mod in okmods_cmip5])

            gw = np.concatenate([gw_cmip5, gw_cmip6])
            frok = np.concatenate([frok_cmip5, frok_cmip6])
for varna in varnames:
    vardict[varna] = np.stack(vardict[varna])

for key in vardict:
    print(vardict[key].shape)
    radclim[(exp, 'map', key, am)] = np.mean(vardict[key], axis=0)
    radclim[(exp, 'map_std', key, am)] = np.std(vardict[key], axis=0)
    radclim[(exp, 'zonal', key, am)] = np.mean(radclim[(exp, 'map', key, am)], axis=-1)
    radclim[(exp, 'zonal_std', key, am)] = np.mean(radclim[(exp, 'map_std', key, am)], axis=-1)
    radclim[(exp, 'global', key, am)] = ctl.global_mean(radclim[(exp, 'map', key, am)], lat)
    radclim[(exp, 'global_std', key, am)] = ctl.global_mean(radclim[(exp, 'map_std', key, am)], lat)

pickle.dump(radclim, open(cart_out + 'cloudcover_allens.p', 'wb'))
# radclim = pickle.load(open(cart_out + 'cloudcover_allens.p'))

# varniuu, lat, lon, dates, time_units, var_units = ctl.read3Dncfield(cart_in + namefi.format('lcb0', 1988, 'hcc'))
# del varniuu

# figure
# want a figure with the base and stoc global means year by year (maybe with error bars from the std?)
titlevar = dict()
titlevar['hcc'] = 'High cloud cover'
titlevar['lcc'] = 'Low cloud cover'
titlevar['mcc'] = 'Mid cloud cover'
# otherwise it doesn't make much sense, i.e. I would just pick up the regions that warm the most:
# need to divide sst_trend and freq_trend by the global tas trend of each model
corrmaps = dict()
ssp = 'ssp585'
for seas in ['NDJFM', 'year']:
    for area in ['EAT', 'PNA']:
        for reg in range(4):
            # trendmat = tas_trends[(ssp, okmods[0])]
            trendmat = field_trends[(ssp, okmok[0].split('_')[0], seas)]
            corr_map = np.empty_like(trendmat)
            pval_map = np.empty_like(trendmat)

            nlat, nlon = trendmat.shape
            lat, lon = ctl.genlatlon(nlat, nlon)

            gw = np.array([ctl.global_mean(tas_trends[(ssp, mod, seas)], lat_180) for mod in okmok])
            # gw = np.array([ctl.global_mean(tas_trends[(ssp, mod.split('_')[0])], lat) for mod in okmok])
            frok = np.array([cose[(ssp, area, mod, 'trend', reg)] for mod in okmok])

            # pointwise Pearson correlation across models between the
            # gw-normalized regime trend and the gw-normalized local field trend
            for la in range(nlat):
                for lo in range(nlon):
                    tastr = np.array([field_trends[(ssp, mod.split('_')[0], seas)][la, lo] for mod in okmok])
                    pears, pval = stats.pearsonr(frok / gw, tastr / gw)
                    corr_map[la, lo] = pears
                    pval_map[la, lo] = pval

            corrmaps[('corr', area, reg, seas)] = corr_map
            corrmaps[('pval', area, reg, seas)] = pval_map

            fnam = cart_out + '{}_corrmap_{}_{}_{}.pdf'.format(fieldnam, area, reg, seas)
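# The per-gridpoint double loop above can be vectorized; a sketch, assuming
# field_all is an (nmods, nlat, nlon) stack of field trends already divided
# by each model's global warming gw, and x = frok / gw (these array names are
# illustrative, not from the script). Only the correlation is computed here;
# stats.pearsonr is still needed per point if p-values are required.
import numpy as np

def corr_map_vectorized(x, field_all):
    # Pearson r between x (nmods,) and every grid point of field_all
    xa = x - x.mean()
    ya = field_all - field_all.mean(axis=0)
    num = np.tensordot(xa, ya, axes=(0, 0))
    den = np.sqrt((xa**2).sum() * (ya**2).sum(axis=0))
    return num / den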
print(annme)

years = np.arange(1850, 2101)
ensmems = ['lcb0', 'lcb1', 'lcb2', 'lcs0', 'lcs1', 'lcs2']

radclim_yr = dict()
for exp in ensmems:
    for varna in varnames:
        radclim_yr[('zonal', exp, varna)] = []
        radclim_yr[('global', exp, varna)] = []

    for year in years:
        vardict = dict()
        for varna in varnames:
            varniuu, lat, lon, dates, time_units, var_units = ctl.read3Dncfield(cart_in + namefi.format(exp, year, varna))
            vardict[varna] = np.mean(varniuu, axis=0)
            radclim_yr[('global', exp, varna)].append(ctl.global_mean(vardict[varna], lat))
            radclim_yr[('zonal', exp, varna)].append(ctl.zonal_mean(vardict[varna]))

    for varna in varnames:
        radclim_yr[('global', exp, varna)] = np.array(radclim_yr[('global', exp, varna)])
        radclim_yr[('zonal', exp, varna)] = np.stack(radclim_yr[('zonal', exp, varna)])

for varna in varnames:
    radclim_yr[('global', 'base', varna)] = np.mean([radclim_yr[('global', exp, varna)] for exp in ensmems if 'lcb' in exp], axis=0)
    radclim_yr[('global', 'stoc', varna)] = np.mean([radclim_yr[('global', exp, varna)] for exp in ensmems if 'lcs' in exp], axis=0)
    radclim_yr[('zonal', 'base', varna)] = np.mean([radclim_yr[('zonal', exp, varna)] for exp in ensmems if 'lcb' in exp], axis=0)
    radclim_yr[('zonal', 'stoc', varna)] = np.mean([radclim_yr[('zonal', exp, varna)] for exp in ensmems if 'lcs' in exp], axis=0)

pickle.dump(radclim_yr, open(cart_out + 'cloudcover_yearly.p', 'wb'))
sys.exit()
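# A possible sketch of the base-vs-stoc figure mentioned in the comments
# above: per-ensemble mean of the yearly global means with a +/-1 std band
# across members (uses radclim_yr, years and ensmems from this script; the
# colors and the varna argument are illustrative choices).
import numpy as np
import matplotlib.pyplot as plt

def plot_base_vs_stoc(varna, radclim_yr, years, ensmems):
    fig, ax = plt.subplots()
    for tag, col in [('lcb', 'black'), ('lcs', 'red')]:
        memb = np.stack([radclim_yr[('global', exp, varna)]
                         for exp in ensmems if tag in exp])
        mea = memb.mean(axis=0)
        std = memb.std(axis=0)
        ax.plot(years, mea, color=col, label=tag)
        ax.fill_between(years, mea - std, mea + std, color=col, alpha=0.3)
    ax.legend()
    return fig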