Пример #1
0
def compute(ifile):
    """Run the full weather-regime pipeline on one netCDF file.

    Reads the field at level 50000. (presumably Pa, i.e. 500 hPa — confirm
    against ctl.read4Dncfield), restricts it to the configured season,
    computes daily anomalies against a running climatology, performs the
    EOF decomposition and K-means clustering, and estimates the cluster
    significance.

    Relies on module-level configuration: ``season``, ``wnd``, ``numpcs``,
    ``numclus`` and ``area``.

    Returns a 10-tuple: (lat, lon, anomalies, eof_solver, centroids,
    labels, cluster patterns, cluster patterns cut to ``area``,
    cluster frequencies, significance).
    """
    ## PRECOMPUTE
    print(ifile)
    print('Running precompute\n')
    field, _, lats, lons, all_dates, _, _, _ = ctl.read4Dncfield(
        ifile, extract_level=50000.)

    # Keep only the days belonging to the configured season.
    field_seas, dates_seas = ctl.sel_season(field, all_dates, season)

    # Daily climatology with window `wnd`; the std output is not needed.
    clim_mean, clim_dates, _ = ctl.daily_climatology(field_seas, dates_seas,
                                                     wnd)

    anoms = ctl.anomalies_daily(field_seas,
                                dates_seas,
                                climate_mean=clim_mean,
                                dates_climate_mean=clim_dates)

    # Cut the anomalies to the analysis area before the EOF step.
    anoms_area, lats_area, lons_area = ctl.sel_area(lats, lons, anoms, area)
    print(anoms_area.shape)

    print('Running compute\n')
    #### EOF COMPUTATION
    solver = ctl.eof_computation(anoms_area, lats_area)
    pcs = solver.pcs()[:, :numpcs]

    print('Running clustering\n')
    #### CLUSTERING
    centroids, labels = ctl.Kmeans_clustering(pcs,
                                              numclus,
                                              algorithm='molteni')

    # Composite patterns on the full grid, then each one cut to `area`.
    patterns = ctl.compute_clusterpatterns(anoms, labels)
    patterns_area = np.stack(
        [ctl.sel_area(lats, lons, pat, area)[0] for pat in patterns])

    varopt = ctl.calc_varopt_molt(pcs, centroids, labels)
    print('varopt: {:8.4f}\n'.format(varopt))
    freqs = ctl.calc_clus_freq(labels)

    print('Running clus sig\n')
    signif = ctl.clusters_sig(pcs,
                              centroids,
                              labels,
                              dates_seas,
                              nrsamp=5000)
    # significance_2 = ctl.clusters_sig(PCs, centroids, labels, dates_season, nrsamp = 5000)
    # print('Significances: {:7.3f} vs {:7.3f}\n'.format(significance, significance_2))

    return (lats, lons, anoms, solver, centroids, labels, patterns,
            patterns_area, freqs, signif)
Пример #2
0
def dothing(wind, coords, lanc=lanc20):
    """Low-pass filter the wind over a North Atlantic box and select DJF.

    Parameters
    ----------
    wind : ndarray (time, lat, lon)
        Wind field to filter.
    coords : dict
        Must contain 'lat', 'lon' and 'dates'.
    lanc : 1-D ndarray, optional
        Filter weights convolved along the time axis at every grid point.
        Defaults to the module-level ``lanc20`` weights.

    Returns
    -------
    (wind_low_djf, dates) : the filtered field restricted to DJF and the
    corresponding dates.
    """
    # lon-W, lon-E, lat-S, lat-N box (same convention as ctl.sel_area).
    area = [-60., 0., 20., 70.]
    lat = coords['lat']
    lon = coords['lon']
    wind_area, latsel, lonsel = ctl.sel_area(lat, lon, wind, area)
    wind_low = np.zeros(wind_area.shape)
    for ila, la in enumerate(latsel):
        for ilo, lo in enumerate(lonsel):
            # BUG FIX: use the `lanc` argument here. The original
            # hard-coded `lanc20`, silently ignoring any filter passed
            # by the caller.
            wind_low[:, ila, ilo] = np.convolve(lanc,
                                                wind_area[:, ila, ilo],
                                                mode='same')
    #wind_low = ctl.running_mean(wind_area, 10)

    wind_low_djf, dates = ctl.sel_season(wind_low, coords['dates'], 'DJF')

    return wind_low_djf, dates
Пример #3
0
# Reference weather-regime computation; heavy_output=True presumably keeps
# the full fields in the result dict -- confirm against cd.WRtool_core.
results_ref = cd.WRtool_core(var_season,
                             lat,
                             lon,
                             dates_season,
                             area,
                             heavy_output=True,
                             **kwar)

# OK. Now I have the regimes. Read the temp/prec.
file_temp_era = '/data-hobbes/fabiano/OBS/ERA/ERAInterim/ERAInt_daily_1979-2018_167.nc'
temp, coords_temp, aux_info_temp = ctl.read_iris_nc(file_temp_era)

# Seasonal climatology (mean and std) of the temperature field.
seas_temp_mean, seas_temp_std = ctl.seasonal_climatology(
    temp, coords_temp['dates'], season)
temp_seas, datetemp = ctl.sel_season(temp, coords_temp['dates'], season)
# Restrict to the Northern Hemisphere. NOTE(review): this rebinds the
# module-level `lat`/`lon` used above -- later code sees the NH grid only.
temp_seas_NH, lat, lon = ctl.sel_area(coords_temp['lat'], coords_temp['lon'],
                                      temp_seas, 'NH')
# Free the full-globe arrays; only the NH subset is used from here on.
del temp, temp_seas
coords_temp['lat'] = lat
coords_temp['lon'] = lon

# Daily anomalies with a trend removed (detrending details live in ctl).
temp_anoms = ctl.anomalies_daily_detrended(temp_seas_NH, datetemp)

# Keep an iris cube of the temperature file to use as the regridding
# reference for the precipitation read below.
file_temp_ref = iris.load(
    '/data-hobbes/fabiano/OBS/ERA/ERAInterim/ERAInt_daily_1979-2018_167.nc')[0]
# file_prec_era = '/data-hobbes/fabiano/OBS/GPCC/daily/gpcc_daily_EU_1982-2016.nc'
# prec, coords_prec, aux_info_prec = ctl.read_iris_nc(file_prec_era, select_var = 'gpcc full data daily product version 2018 precipitation per grid')
file_prec_era = '/data-hobbes/fabiano/OBS/ERA/ERAInterim/ERAInt_daily_1979-2018_228_pr_daysum_ok.nc'
# Precipitation: converted to mm and regridded onto the temperature grid.
prec, coords_prec, aux_info_prec = ctl.read_iris_nc(
    file_prec_era, convert_units_to='mm', regrid_to_reference=file_temp_ref
)  #, select_var = 'gpcc full data daily product version 2018 precipitation per grid')
Пример #4
0
# Restrict the field to the configured year range.
var, dates = ctl.sel_time_range(var, dates,
                                ctl.range_years(yearange[0], yearange[1]))

# One value per DJF season (seasonal_average=True collapses each winter).
var_set, dates_set = ctl.seasonal_set(var, dates, 'DJF', seasonal_average=True)
years = np.array([da.year for da in dates_set])

############## PLOT GLOBAL TRENDS ######################

# Three identical stanzas below: scatter the area-mean series (offset so the
# first year is zero) and overlay its linear fit, for Global / EAT / NH.
fig, ax = plt.subplots()
glob_mea = ctl.global_mean(var_set, lat)
g0 = glob_mea[0]  # reference value: first year, subtracted for plotting
m, c = ctl.linear_regre(years, glob_mea)
ax.scatter(years, glob_mea - g0, label='Global', color='blue')
ax.plot(years, c + m * years - g0, color='blue')

var_area, lat_area, lon_area = ctl.sel_area(lat, lon, var_set, 'EAT')
eat_mea = ctl.global_mean(var_area, lat_area)
g0 = eat_mea[0]
m, c = ctl.linear_regre(years, eat_mea)
ax.scatter(years, eat_mea - g0, label='EAT', color='green')
ax.plot(years, c + m * years - g0, color='green')

# NOTE(review): `eat_mea` is reused here for the NH series -- the name is
# misleading but the values plotted are the NH area mean.
var_area, lat_area, lon_area = ctl.sel_area(lat, lon, var_set, 'NH')
eat_mea = ctl.global_mean(var_area, lat_area)
g0 = eat_mea[0]
m, c = ctl.linear_regre(years, eat_mea)
ax.scatter(years, eat_mea - g0, label='NH', color='orange')
ax.plot(years, c + m * years - g0, color='orange')
ax.legend()

ax.set_title('Trend in the average zg500 during DJF')
Пример #5
0
                     plot_anomalies=False,
                     filename=cart_out_maps + 'map_full_ERA.pdf')


def func_sum(blok_ind, reg_ind, nreg=4):
    """Mean index value per regime.

    For each regime label, composites the daily maps assigned to that
    regime (time mean) and reduces the composite to its overall mean.

    Parameters
    ----------
    blok_ind : ndarray, shape (n_days, ...)
        Daily maps (or values) of the index.
    reg_ind : ndarray, shape (n_days,)
        Regime label of each day, in ``0 .. nreg-1``.
    nreg : int, optional
        Number of regimes. Defaults to 4, matching the original
        hard-coded version, so existing callers are unaffected.

    Returns
    -------
    ndarray, shape (nreg,)
        Mean of the per-regime composite map for each regime.
    """
    alsums = []
    for reg in range(nreg):
        # Composite: average over the days assigned to this regime.
        # (An empty regime yields NaN, as in the original.)
        okmap = np.mean(blok_ind[reg_ind == reg], axis=0)
        alsums.append(np.mean(okmap))

    return np.array(alsums)


# Blocking anomaly with respect to the full-period ERA map, cut to EAT.
blok_anom = blok - ERA_map_full
blok_anom_area, _, _ = ctl.sel_area(lat, lon, blok_anom, 'EAT')
# Bootstrap of the per-regime mean anomaly: 500 resamples of 30 picks
# each (resampling details live in ctl.bootstrap; `y=wri` presumably
# carries the regime labels -- confirm against ctl).
res_boot_ERA = ctl.bootstrap(blok_anom_area,
                             datcom,
                             'DJF',
                             y=wri,
                             apply_func=func_sum,
                             n_choice=30,
                             n_bootstrap=500)

# Reference value: mean of the full ERA map over EAT.
EAT_mean = dict()
ERA_map_area, _, _ = ctl.sel_area(lat, lon, ERA_map_full, 'EAT')
EAT_mean['ERA_0'] = np.mean(ERA_map_area)

# allmaps[('ERA', 'full')], lato, lono = ctl.sel_area(lat, lon, ERA_map_full, 'EAT')
#
# ERA_allmaps = []
Пример #6
0
    #wind_low = ctl.running_mean(wind_area, 10)

    wind_low_djf, dates = ctl.sel_season(wind_low, coords['dates'], 'DJF')

    return wind_low_djf, dates


# North Atlantic box: (lon-W, lon-E, lat-S, lat-N).
area = [-60., 0., 20., 70.]
cart_out = '/home/fabiano/Research/lavori/Jet_latitude/'

# Orography / land mask from the ERA-Interim reference file.
#orogfi = '/data-hobbes/reference/ERAInterim/geopot_vegcover_25.nc'
orogfi = '/data-hobbes/reference/ERAInterim/geopot_vegcover.nc'
orog, coords, aux_info = ctl.readxDncfield(orogfi, select_var='z')
#orog = orog/9.80665
# Mask of high terrain. NOTE(review): the commented division by g above
# suggests `orog` is geopotential, not height -- confirm the 1300. units.
orogmask = orog > 1300.0
orogarea, _, _ = ctl.sel_area(coords['lat'], coords['lon'], orogmask, area)
orogarea = orogarea[0]  # drop the leading (single) record dimension
#ctl.plot_map_contour(orogmask, plot_type = 'pcolormesh')

# ERA40 daily zonal wind files; 1957 starts in September, hence the
# special file name below.
cart_in = '/nas/reference/ERA40/daily/u/'
file_in = 'u_Aday_ERA40_{}01_{}12.nc'
yea = 1957
file_in_57 = 'u_Aday_ERA40_195709_195712.nc'

winds = []
alldates = []

# Read u at the 850 level and apply the low-pass + DJF selection.
wind, coords, aux_info = ctl.readxDncfield(cart_in + file_in_57,
                                           extract_level=850)
wind_lo, dates = dothing(wind, coords)
winds.append(wind_lo)
Пример #7
0
# Display units per variable.
varunits = dict()
varunits['temp'] = 'K'
varunits['prec'] = 'mm/day'

# Plot limits per variable (used elsewhere, presumably for color scales).
limitz = dict()
limitz['temp'] = (0.4, 2.)
limitz['prec'] = (0.6, 0.8)

area = 'EAT'

# Cut every composite in all_compos to the analysis area.
area_compos = dict()
for var in ['temp', 'prec']:
    for k in all_compos:
        if type(all_compos[k][var]) == list:
            all_compos[k][var] = np.stack(all_compos[k][var])
        area_compos[(var, k)], lat_area, lon_area = ctl.sel_area(lat, lon, all_compos[k][var], area)#'Eu')

# Normalization factor: sqrt of the number of grid points in one map.
# NOTE(review): relies on `var`/`k` holding their last loop values.
ncar = np.sqrt(area_compos[(var, k)][0, ...].size)

# RMS and pattern correlation of each model composite against ERA, plus a
# half-range "error bar" obtained by shifting the composite by +/- its std.
val_compare = dict()
for cos in ['base', 'stoc']:
    for var in ['temp', 'prec']:
        et, patcor = ctl.calc_RMS_and_patcor(area_compos[(var, cos)], area_compos[(var, 'ERA')])
        val_compare[('RMS', var, cos)] = et/ncar
        val_compare[('patcor', var, cos)] = patcor

        et2, patcor2 = ctl.calc_RMS_and_patcor(area_compos[(var, cos)]+area_compos[(var, cos+'_std')], area_compos[(var, 'ERA')])
        et3, patcor3 = ctl.calc_RMS_and_patcor(area_compos[(var, cos)]-area_compos[(var, cos+'_std')], area_compos[(var, 'ERA')])
        etd = np.array([max([et[i], et2[i], et3[i]])-min([et[i], et2[i], et3[i]]) for i in range(4)])/(2*ncar)
        patd = np.array([max([patcor[i], patcor2[i], patcor3[i]])-min([patcor[i], patcor2[i], patcor3[i]]) for i in range(4)])/2.
        # NOTE(review): `patd` is computed but never stored -- only the RMS
        # spread is kept below. Possibly an omission; verify intent.
        val_compare[('RMS', var, cos+'_std')] = etd
Пример #8
0
for aaa in ['NML', 'EAT', 'EAText']:
    # Taylor semplice
    cart_out = cart + aaa + '/'
    ctl.mkdir(cart_out)

    for tip in ['tot LWA', 'trans LWA', 'Montg streamf']:
        patt_ref = resu['ERA5'][tip]
        olat = resu['ERA5']['lat']
        olon = resu['ERA5']['lon']

        fig = plt.figure(figsize=(16, 12))
        for num, patt in enumerate(patnames):
            ax = plt.subplot(2, 2, num + 1, polar=True)

            obs = patt_ref[num]
            obs, lat_area, lon_area = ctl.sel_area(olat, olon, obs, areas[aaa])
            modpats = [
                ctl.sel_area(resu[mod]['lat'], resu[mod]['lon'],
                             resu[mod][tip][num], areas[aaa])[0]
                for mod in mods_all
            ]

            ctl.Taylor_plot(modpats,
                            obs,
                            latitude=lat_area,
                            ax=ax,
                            colors=cols,
                            markers=marks,
                            only_first_quarter=True,
                            plot_ellipse=False,
                            ellipse_color=colors[0],
Пример #9
0
# Climatology window lengths (days) to compare.
allnums = [5, 10, 20]

for ifile, name in zip(listafils, model_names):
    print(name)
    # Read the field at level 500 (presumably hPa -- confirm against
    # ctl.readxDncfield).
    var, lat, lon, dates, time_units, var_units, time_cal = ctl.readxDncfield(
        ifile, extract_level=500)
    # var, coords, aux_info = ctl.read_iris_nc(ifile)
    # lat = coords['lat']
    # lon = coords['lon']
    # dates = coords['dates']

    var, dates = ctl.sel_time_range(var, dates, ctl.range_years(1957, 2014))

    # Full-period daily climatology for each window, cut to EAT.
    for num in allnums:
        clm, dtclm, _ = ctl.daily_climatology(var, dates, num)
        clmarea, lat_area, lon_area = ctl.sel_area(lat, lon, clm, 'EAT')
        clm_fullperiod[(name, num)] = clmarea
    # NOTE(review): dtclm comes from the last window only; the climatology
    # dates are assumed identical across windows.
    dtclmpd = pd.to_datetime(dtclm)
    datesall[name] = dtclmpd

    # Running climatologies over `nya`-year windows; store the difference
    # between the last and first window over EAT (climatology drift).
    climate_mean, dates_climate_mean = ctl.trend_daily_climat(var,
                                                              dates,
                                                              window_days=20,
                                                              window_years=nya)
    difftot, lat_area, lon_area = ctl.sel_area(
        lat, lon, climate_mean[-1] - climate_mean[0], 'EAT')
    climate_mean_dict[name] = difftot

# Index bounds for splitting the EAT box into lat/lon sectors.
lat_sect = [(16, None), (8, 16), (0, 8)]
lon_sect = [(0, 16), (16, 32), (32, None)]
Пример #10
0
# PRIMAVERA stream1 SST files; {} slots are model name and member.
cart_sst = '/data-woodstock/PRIMAVERA/stream1/merged/tos/'
filsst = 'tos-{}-1950-2014-{}-remap.nc'

#cart_sst = '/nas/PRIMAVERA/Stream1/hist-1950/{}/{}/tos/'
# ERA reference SST (monthly, 1 degree).
filera = '/nas/reference/ERA40+Int/sst_Amon_ERA40_195701-201812_1deg.nc'
sstera, datacoords, _ = ctl.readxDncfield(filera)
datesera = datacoords['dates']
lat = datacoords['lat']
lon = datacoords['lon']

# Kelvin -> Celsius.
sstera = sstera - 273.15
# DJF climatology of ERA SST over the common 1957-2014 period.
sstera_mean, sstera_sd = ctl.seasonal_climatology(sstera, datesera, 'DJF', dates_range = ctl.range_years(1957, 2014))

# North Atlantic box: (lon-W, lon-E, lat-S, lat-N).
area_box = (-80, 10, 20, 80)

sstera_mean_area, latsel, lonsel = ctl.sel_area(lat, lon, sstera_mean, area_box)
# Mask of invalid points (e.g. land / fill values far outside the
# physical SST range).
okpoera = (sstera_mean_area < -100) | (sstera_mean_area > 500)

allcose = dict()
allrms = dict()
allpatcor = dict()
#for mod, mem in zip(model_names, ens_mems):
for mod in model_names:
    print(mod)
    filmod_part = 'tos-{}-1950-2014-'.format(mod)
    if 'AWI' in mod:
        filmod_part = 'tos-{}-1950-2010-'.format(mod)
    allfimod = [fi for fi in os.listdir(cart_sst) if filmod_part in fi and 'remap.nc' in fi]
    print(allfimod)

    for fi in allfimod:
Пример #11
0
                                   n_color_levels=21,
                                   draw_contour_lines=False,
                                   n_lines=5,
                                   color_percentiles=(0, 100),
                                   bounding_lat=30,
                                   plot_margins=area,
                                   add_rectangles=None,
                                   draw_grid=True,
                                   plot_type='filled_contour',
                                   verbose=False,
                                   lw_contour=0.5)

        figs.append(fig)

        gigi = sspcoso - histcoso
        gogo, _, _ = ctl.sel_area(lat, lon, gigi, area)

        diff = ref_solver.projectField(gogo,
                                       neofs=4,
                                       eofscaling=0,
                                       weighted=True)
        stri = 4 * '{:7.2f}' + '\n'
        cosi = [ctl.cosine(diff, cen) for cen in results_ref['centroids']]
        print(stri.format(*cosi))

    ctl.plot_pdfpages(cart_out_orig + 'map_rebase_diff_{}.pdf'.format(area),
                      figs)

    bau = results_hist[mod]['var_dtr']
    bauda = np.arange(1965, 2015)
    gigi = results_ssp[mod]['var_dtr']
Пример #12
0
    # zonme = ctl.zonal_mean(trend)
    # stat_eddy_trend = np.empty_like(trend)
    # for i in range(trend.shape[0]):
    #     stat_eddy_trend[i,:] = trend[i,:]-zonme[i]

    stat_eddy_trend = cose[('se_trend', mod)].squeeze()
    se_errtrend = cose[('se_errtrend', mod)].squeeze()

    # trend_area, lat_area, lon_area = ctl.sel_area(lat, lon, trend, area)
    # trend_anom = trend-np.mean(trend_area)
    trendsanom.append(trend)
    trendsstateddy.append(stat_eddy_trend)
    zontrend.append(cose[('zontrend', mod)].squeeze())

    var, lata, lona = ctl.sel_area(lat, lon, trend, 'EAT')
    zeat = ctl.zonal_mean(var)
    zontrend_EAT.append(zeat)

    var, lata, lona = ctl.sel_area(lat, lon, trend, 'PNA')
    zpna = ctl.zonal_mean(var)
    zontrend_PNA.append(zpna)

    hatchs.append(np.abs(trend) > 2 * errtrend)
    hatchs_se.append(np.abs(stat_eddy_trend) > 2 * se_errtrend)

NSIG = int(0.8 *
           len(okmods))  # number of models to consider response significant

trendsanom.append(np.mean(trendsanom, axis=0))
hatchs.append(
Пример #13
0
# Keep the northern part of the land mask only (rows from index 128 on).
land_mask = land_mask[128:, :]

#lat_range = [37, 45]
#lon_range = [7, 18]
lat_range = [37, 45]
lon_range = [-79, -70]
plt.ion()
# Common histogram bins (degrees C).
binzz = np.linspace(-20,25,50)

cart = '/home/fedefab/Scrivania/Research/Post-doc/SPHINX/Daily_temp/'
var_all = dict()

# For three 6-year slices, histogram the daily minimum land temperature
# over the selected box.
for ran in [1950,2020,2090]:
    fil = 'lcb0_tas_{}-{}.nc'.format(ran, str(ran+5)[-2:])
    var, lat, lon, dates, time_units, var_units = ctl.read3Dncfield(cart+fil)
    # Kelvin -> Celsius.
    var = var - 273.15
    var_all[ran] = var

    # NOTE(review): other calls in this codebase pass the box to sel_area
    # as (lon-W, lon-E, lat-S, lat-N); here lat_range+lon_range yields
    # [37, 45, -79, -70], i.e. lat first. Verify the expected ordering --
    # this may be a lat/lon swap bug.
    var_ok, lat_area, lon_area = ctl.sel_area(lat, lon, var, lat_range+lon_range)
    mask_ok, lat_area, lon_area = ctl.sel_area(lat, lon, land_mask, lat_range+lon_range)

    # One value per day: the minimum over the land points of the box.
    listavals = []
    for cos in var_ok:
        cosok = np.min(cos[mask_ok])
        #cosok = np.percentile(cos[mask_ok], 20)
        listavals.append(cosok)
    plt.hist(listavals, bins = binzz, label = str(ran), alpha = 0.5)

plt.legend()
plt.xlim(-20,None)
Пример #14
0
# Read wind components, temperature, geopotential height and PV.
v, vcoords, _ = ctl.readxDncfield(filnam.format('v', 'v'))
ta, tacoords, _ = ctl.readxDncfield(filnam.format('ta', 'ta'))
zg, zgcoords, _ = ctl.readxDncfield(filnam.format('zg', 'zg'))
# Height -> geopotential (multiply by standard gravity).
zg = 9.80665 * zg
pv, pvcoords, _ = ctl.readxDncfield(filnam.format('pv', 'pv'))

#u.shape (365, 5, 241, 480)
lat_orig = ucoords['lat']
lon_orig = ucoords['lon']

# Pressure levels in the files (presumably hPa, top-down after the ::-1
# flip below -- confirm).
levs = np.array([850, 500, 200, 50, 30])
# Analysis box: (lon-W, lon-E, lat-S, lat-N).
area = (-180, 180, 10, 85)

#for da in np.arange(365):
# Single day for now: third-from-last time step. The [::-1] reverses the
# level axis (second dimension, per the shape comment above).
da = -3
u_, lat, lon = ctl.sel_area(lat_orig, lon_orig, u[da, ::-1, ...], area)
v_, lat, lon = ctl.sel_area(lat_orig, lon_orig, v[da, ::-1, ...], area)
ta_, lat, lon = ctl.sel_area(lat_orig, lon_orig, ta[da, ::-1, ...], area)
zg_, lat, lon = ctl.sel_area(lat_orig, lon_orig, zg[da, ::-1, ...], area)
pv_, lat, lon = ctl.sel_area(lat_orig, lon_orig, pv[da, ::-1, ...], area)

# Coriolis parameter f = 2*Omega*sin(lat), with Omega from a 24 h day
# (solar, not sidereal, rotation rate).
Om = 2 * np.pi / 86400.
f = 2 * Om * np.sin(np.deg2rad(lat))
# Avoid division by zero at the equator.
f[f == 0.0] = 1.e-6
# Add a trailing axis so f broadcasts against (lat, lon) fields.
f = f[:, np.newaxis]

#f = np.reshape(np.repeat(f, len(lon)), zg[0,0].shape)
grad_zg = dict()
ug = dict()
vg = dict()
vortg = dict()
            var, coords, aux_info = ctl.readxDncfield(cart_data + okfil)
        except Exception as exp:
            print('Unable to read data for {}, going on with next model..'.format(mod + '_' + mem))
            print(exp)
            continue

        lat = coords['lat']
        lon = coords['lon']
        dates = coords['dates']

        var, dates = ctl.sel_time_range(var, dates, ctl.range_years(1964, 2014))

        zg_dtr, coeffs, var_regional, dates_seas = ctl.remove_global_polytrend(lat, lon, var, dates, 'NDJFM', deg = 1, area = 'NML', print_trend = True)

        for area in ['EAT', 'PNA']:
            zg_ok, _, _ = ctl.sel_area(lat, lon, zg_dtr, area)
            cmean, dates_cm, _ = ctl.daily_climatology(zg_ok, dates_seas, window = 20)
            climmeans[area].append(cmean)
        #trendmat, errtrendmat, cmat, errcmat = ctl.local_lineartrend_climate(lat, lon, var, dates, 'NDJFM', print_trend = True, remove_global_trend = False, global_deg = 3, global_area = 'global')
        #field_trends[(ssp, mod, 'NDJFM')] = trendmat

    for area in ['EAT', 'PNA']:
        climate_mean[(area, mod)] = np.mean(climmeans[area], axis = 0)
        climate_std[(area, mod)] = np.std(climmeans[area], axis = 0)

    climate_mean_dates[mod] = dates_cm
    num_members[mod] = len(climmeans)

pickle.dump([climate_mean, climate_mean_dates, climate_std, num_members], open(cart_out + 'climate_mean_hist.p', 'wb'))

for area in ['EAT', 'PNA']: