Ejemplo n.º 1
0
def saveGPM():
    """Regrid daily GPM HQprecipitation files onto the ERA5 grid and save them.

    Iterates over every GPM daily ``*.nc4`` file, builds a salem lookup
    table from the first file against a reference ERA5 u-wind field, then
    reuses that table for all subsequent files. Writes one compressed
    NetCDF per input file.
    """
    chirpsall_list = glob.glob('/media/ck/Elements/SouthAmerica/GPM/daily/*.nc4')
    for ids, ch in enumerate(chirpsall_list):

        chirpsall = xr.open_dataset(ch)

        if ids == 0:
            # Reference ERA5 field: defines the target grid for regridding.
            era5 = xr.open_dataset('/media/ck/Elements/SouthAmerica/ERA5/hourly/uv_15UTC/uv_15UTC_1985_peru.nc')

            u200 = era5['u'].isel(time=0).sel(level=250).squeeze().load()
            u200 = uda.flip_lat(u200)

            chirps = chirpsall['HQprecipitation'].T.squeeze()
            # Compute the lookup table once; reused for every later file.
            ch_on_e, lut = u200.salem.lookup_transform(chirps, return_lut=True)
        else:
            ch_on_e = u200.salem.lookup_transform(chirpsall['HQprecipitation'].T.squeeze(), lut=lut)

        ch_on_e.name = 'precip'
        comp = dict(zlib=True, complevel=5)
        encoding = {'precip': comp}
        fname = os.path.basename(ch)
        # fname already ends in '.nc' after this replace, so no extra '.nc'
        # is appended below (the original produced '*_onERA.nc.nc').
        fname = fname.replace('.V06.nc4.SUB.nc4', '_onERA.nc')
        ch_on_e.to_netcdf('/media/ck/Elements/SouthAmerica/GPM/daily_onERA/' + fname,
                          mode='w', encoding=encoding, format='NETCDF4')
Ejemplo n.º 2
0
def readERA():
    """Load the ERA5 15UTC u-wind file, flip its latitudes, and zero the
    hour of every timestamp so the time axis aligns with daily data.

    Returns the modified dataset.
    """
    ds = xr.open_dataset('/media/ck/Elements/SouthAmerica/ERA5/hourly/u_15UTC_1981-2019_peru_big.nc')
    ds = uda.flip_lat(ds)
    stamps = pd.to_datetime(ds.time.values)
    # Replace each timestamp's hour with 00.
    ds['time'] = ('time', [stamp.replace(hour=0) for stamp in stamps])
    return ds
Ejemplo n.º 3
0
def saveERA5():
    """Extract the 09UTC 850hPa u-wind for 1981-2018 from hourly ERA5
    pressure-level files and write it to a single compressed NetCDF."""
    #chirpsbox = [-81, -68, -18.5, 0]  # peru daily

    ds = xr.open_mfdataset(cnst.ERA5_HOURLY_PL_HU + '/ERA5_*_pl.nc',
                           concat_dim='time', combine='nested')
    # 850hPa level at 09UTC only.
    uwind = ds['u'].sel(level=850, time=(ds['time.hour'] == 9))  # .load()   #longitude=slice(bigbox[0], bigbox[1]), latitude=slice(bigbox[3], bigbox[2]),
    uwind = uda.flip_lat(uwind)

    # Restrict to the 1981-2018 period.
    years = uwind['time.year']
    uwind = uwind.sel(time=((years > 1980) & (years < 2019)))
    uwind.name = 'u'
    encoding = {'u': {'zlib': True, 'complevel': 5}}
    uwind.to_netcdf('/media/ck/Elements/SouthAmerica/ERA5/hourly/pressure_levels/u850_09UTC_1981-2018_peru.nc',
                    mode='w', encoding=encoding, format='NETCDF4')
Ejemplo n.º 4
0
def saveCHIRPS():
    """Regrid the daily CHIRPS precipitation dataset onto the ERA5 grid
    and write it out as one compressed NetCDF."""
    box = [-81, -68, -18.5, 0]  # peru daily

    precip = xr.open_dataset(
        cnst.elements_drive + 'SouthAmerica/CHIRPS/chirps-v2.0.daily.peru.nc'
    ).chunk({'time': 365})['precip']
    #chirpsm = xr.open_dataset(cnst.elements_drive + 'SouthAmerica/CHIRPS/chirps-v2.0.monthly.nc')

    date = '2016-01-13'
    dt = pd.to_datetime(date)
    era5 = xr.open_dataset(cnst.ERA5_HOURLY_PL_HU + '/ERA5_' + str(dt.year) + '_' +
                           str(dt.month).zfill(2) + '_' + str(dt.day).zfill(2) + '_pl.nc')

    # Reference 200hPa u-wind at 15UTC, cut to the Peru box — this defines
    # the target grid for the regridding.
    uref = era5['u'].sel(longitude=slice(box[0], box[1]),
                         latitude=slice(box[3], box[2]),
                         level=200,
                         time=(era5['time.hour'] == 15)).squeeze().load()
    uref = uda.flip_lat(uref)

    # Build the lookup table on a single day, then apply it to the full series.
    sample = precip.sel(time=date).load().squeeze()
    _, lut = uref.salem.lookup_transform(sample, return_lut=True)
    regridded = uref.salem.lookup_transform(precip, lut=lut)

    encoding = {'precip': {'zlib': True, 'complevel': 5}}
    regridded.to_netcdf('/media/ck/Elements/SouthAmerica/CHIRPS/CHIRPS_peru_onERA5.nc',
                        mode='w', encoding=encoding, format='NETCDF4')
Ejemplo n.º 5
0
def saveGRIDSAT():
    """Regrid daily GRIDSAT files onto the ERA5 grid and save them.

    Skips inputs whose output file already exists. The salem lookup table
    is computed once — from the first file actually processed — and reused
    for every later file.
    """
    chirpsall_list = glob.glob(cnst.GRIDSAT_PERU + '/daily_-15ALLkm2_UTC_DAY/*.nc')

    # Original keyed the table build on 'ids == 0', which raised a NameError
    # for u200/lut when the first file was skipped below; keying on the
    # lookup table itself is robust to skipped files.
    lut = None
    for ch in chirpsall_list:

        fname = os.path.basename(ch)
        fname = fname.replace('UTCDay', 'UTCDay_onERA')
        outpath = cnst.GRIDSAT_PERU + '/daily_-15ALLkm2_UTC_DAY_onBIGERA/' + fname + '.nc'

        if os.path.isfile(outpath):
            print('File exists, continue')
            continue

        chirpsall = xr.open_dataset(ch)

        if lut is None:
            # Reference ERA5 field defining the target grid.
            era5 = xr.open_dataset('/media/ck/Elements/SouthAmerica/ERA5/hourly/uv_15UTC/uv_15UTC_1985_peru.nc')

            u200 = era5['u'].sel(level=250, time=(era5['time.hour'] == 15)).squeeze().load()
            u200 = uda.flip_lat(u200)

            # NOTE(review): as in the original, only the first time step of
            # this first file is regridded and written — confirm intended.
            chirps = chirpsall.isel(time=0).squeeze()
            ch_on_e, lut = u200.salem.lookup_transform(chirps, return_lut=True)
        else:
            ch_on_e = u200.salem.lookup_transform(chirpsall, lut=lut)

        comp = dict(zlib=True, complevel=5)
        encoding = {'tir': comp}
        ch_on_e.to_netcdf(outpath, mode='w', encoding=encoding, format='NETCDF4')
Ejemplo n.º 6
0
def rewrite_AMSR2(file):
    """Convert a raw LPRM-AMSR2 soil-moisture file into a cleaned NetCDF.

    The output path is derived from the input name ('raw'->'nc',
    'LPRM-AMSR2'->'AMSR2', date taken from the filename). Daytime ('_A_')
    files are stamped 13UTC, others 01UTC. Soil-moisture values below -1
    are masked to NaN and the field is cut to a lon/lat box before writing.

    Returns the written dataset, or None when the output already exists or
    the field contains no valid data.
    """
    out = file.replace('raw', 'nc')
    #out = out.replace('.nc4', '.nc')
    out = out.replace('LPRM-AMSR2', 'AMSR2')

    # '_A_' marks the ascending (daytime) overpass.
    hour = 13 if '_A_' in file else 1

    cut = os.path.basename(out)
    path = os.path.dirname(out)
    path = path.replace(path[-6::], '')
    pieces = cut.split('_')
    tstamp = (pieces[5])[0:8]  # 'YYYYMMDD' pulled from the filename
    out = path + os.sep + pieces[0] + '_' + pieces[1] + '_LPRMv05_' + pieces[
        2] + '_' + tstamp + '.nc'

    if os.path.isfile(out):
        return

    yr = int(tstamp[0:4])
    mon = int(tstamp[4:6])
    dom = int(tstamp[6:8])
    # pd.datetime was removed in modern pandas; pd.Timestamp is equivalent.
    date = [pd.Timestamp(yr, mon, dom, hour, 0)]

    ds = xr.open_dataset(file)
    ds = u_darrays.flip_lat(ds)

    da = xr.DataArray((ds['soil_moisture_c1']).values.T[None, ...],
                      coords={
                          'time': date,
                          'lat': ds.Latitude.values,
                          'lon': ds.Longitude.values
                      },
                      dims=['time', 'lat', 'lon'])  # .isel(time=0)

    da.values[da.values < -1] = np.nan
    # The original guard 'np.sum(...) == np.nan' is always False (NaN never
    # compares equal); skip the file only when *all* values are NaN.
    if np.isnan(da.values).all():
        return

    ds = xr.Dataset({'SM': da})

    ds = ds.sel(lon=slice(-18, 30),
                lat=slice(0, 27))  #lon=slice(-20,55), lat=slice(-40,40) AFRICA

    try:
        comp = dict(zlib=True, complevel=5)
        encoding = {var: comp for var in ds.data_vars}
        ds.to_netcdf(path=out, mode='w', encoding=encoding, format='NETCDF4')

    except OSError:
        print('Did not find ' + out)
        print('Out directory not found')
    print('Wrote ' + out)
    return ds
Ejemplo n.º 7
0
def trend_all():
    """Plot monthly/seasonal 12UTC trend maps over a Southern-Africa box.

    For each month (or month range), computes Mann-Kendall decadal trends of
    2m temperature, low-level specific humidity, wind shear and -65C cloud
    cover via calc_trend, draws a 4-panel figure saved under fpath, and
    finally pickles the cloud-cover trend and mean fields per month.
    """

    srfc = cnst.ERA_MONTHLY_SRFC_SYNOP
    pl = cnst.ERA_MONTHLY_PL_SYNOP
    mean_mcs = 'aggs/gridsat_WA_-65_monthly_count_-40base_15-21UTC_1000km2.nc'
    extreme_mcs = 'aggs/gridsat_WA_-65_monthly_count_-40base_15-21UTC_1000km2.nc'
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-65_monthly_count_-40base_15-21UTC_1000km2.nc'

    fpath = cnst.network_data + 'figs/CLOVER/months/'

    # Analysis domain: [lon_min, lon_max, lat_min, lat_max]
    box = [5, 55, -36, 0]  #  [-18,40,0,25] #

    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(box[0], box[1]),
                latitude=slice(box[2], box[3]))
    da2 = xr.open_dataset(srfc)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(box[0], box[1]),
                  latitude=slice(box[2], box[3]))
    da3 = xr.open_dataset(mcs)
    da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2], box[3]))

    lons = da.longitude
    lats = da.latitude

    press = da2['sp']
    press = press[press['time.hour'] == 12]
    # NOTE(review): surface pressure scaled by 1000 — assumed unit
    # conversion; confirm against the input file's units.
    press.values = press.values * 1000
    low_press = 850
    up_press = 550

    # All fields below are restricted to the 12UTC synoptic hour.
    q = da['q'].sel(level=slice(low_press - 100, low_press)).mean('level')
    q = q[q['time.hour'] == 12]
    t2d = da2['t2m']  #['t2m']
    #t2d = da['t'].sel(level=slice(800, 850)).mean('level')
    t2d = t2d[t2d['time.hour'] == 12]

    u600 = da['u'].sel(level=slice(up_press - 100, up_press)).mean('level')
    u600 = u600[u600['time.hour'] == 12]
    v600 = da['v'].sel(level=slice(up_press - 100, up_press)).mean('level')
    v600 = v600[v600['time.hour'] == 12]

    # u_v_to_ws_wd appears to return a (speed, direction) pair; [0] below
    # picks the speed component — inferred from variable names, confirm.
    ws600 = u_met.u_v_to_ws_wd(u600, v600)

    u800 = da['u'].sel(level=slice(low_press - 100, low_press)).mean('level')
    u800 = u800[u800['time.hour'] == 12]

    v800 = da['v'].sel(level=slice(low_press - 100, low_press)).mean('level')
    v800 = v800[v800['time.hour'] == 12]

    # Shear between the upper and lower layers.
    shear_u = u600 - u800  #u600-
    shear_v = v600 - v800  # v600-

    ws_shear = u_met.u_v_to_ws_wd(shear_u.values, shear_v.values)

    ws_600 = t2d.copy(deep=True)
    ws_600.name = 'ws'
    ws_600.values = ws600[0]

    shear = t2d.copy(deep=True)
    shear.name = 'shear'
    shear.values = ws_shear[0]

    u6 = u600
    v6 = v600

    # Convert to g/kg (the colourbar below is labelled 'g kg-1 decade-1').
    q.values = q.values * 1000

    # Coarsen the target grid and aggregate the cloud field onto it.
    grid = t2d.salem.grid.regrid(factor=0.5)
    t2 = t2d  # grid.lookup_transform(t2d)
    tir = grid.lookup_transform(da3['tir'])

    grid = grid.to_dataset()
    tir = xr.DataArray(tir,
                       coords=[da3['time'], grid['y'], grid['x']],
                       dims=['time', 'latitude', 'longitude'])

    # Single months plus one (Nov, Jan) cross-year season as a tuple.
    months = [
        (11, 1), 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12
    ]  #[(12,2)]#[1,2,3,4,5,6,7,8,9,10,11,12]# #,2,3,11,12]#[(12,2)]#[1,2,3,4,5,6,7,8,9,10,11,12]# #,2,3,11,12]

    dicm = {}
    dicmean = {}

    for m in months:
        method = 'mk'  # Mann-Kendall trend

        if type(m) == int:
            m = [m]

        sig = True

        t2trend, t2mean = calc_trend(t2,
                                     m,
                                     method=method,
                                     sig=sig,
                                     hour=12,
                                     wilks=False)  #hour=12,
        t2_mean = t2mean.mean(axis=0)

        tirtrend, tirmean = calc_trend(tir,
                                       m,
                                       method=method,
                                       sig=sig,
                                       wilks=False)

        tirm_mean = tirmean.mean(axis=0)

        qtrend, qmean = calc_trend(q,
                                   m,
                                   method=method,
                                   sig=sig,
                                   hour=12,
                                   wilks=False)  #hour=12,
        q_mean = qmean.mean(axis=0)

        sheartrend, shearmean = calc_trend(shear,
                                           m,
                                           method=method,
                                           sig=sig,
                                           hour=12,
                                           wilks=False)  #hour=12,
        shear_mean = shearmean.mean(axis=0)

        u6trend, u6mean = calc_trend(u6,
                                     m,
                                     method=method,
                                     sig=sig,
                                     hour=12,
                                     wilks=False)  #hour=12,
        u6_mean = u6mean.mean(axis=0)
        v6trend, v6mean = calc_trend(v6,
                                     m,
                                     method=method,
                                     sig=sig,
                                     hour=12,
                                     wilks=False)  #hour=12,
        v6_mean = v6mean.mean(axis=0)

        # Per-year trends scaled to per-decade values.
        t2trend_unstacked = t2trend * 10.  # warming over decade
        qtrend_unstacked = qtrend * 10.  # warming over decade
        sheartrend_unstacked = sheartrend * 10.  # warming over decade
        u6trend_unstacked = u6trend * 10
        v6trend_unstacked = v6trend * 10

        # Cloud-cover trend expressed as percent of the monthly mean per decade.
        tirtrend_unstacked = (
            (tirtrend.values) * 10. / tirm_mean.values) * 100.

        tirtrend_out = xr.DataArray(tirtrend_unstacked,
                                    coords=[grid['y'], grid['x']],
                                    dims=['latitude', 'longitude'])
        tirmean_out = xr.DataArray(tirm_mean,
                                   coords=[grid['y'], grid['x']],
                                   dims=['latitude', 'longitude'])

        # Collect per-month cloud-cover fields, keyed by the first month.
        dicm[m[0]] = tirtrend_out
        dicmean[m[0]] = tirmean_out

        t_da = t2trend_unstacked
        q_da = qtrend_unstacked
        s_da = sheartrend_unstacked
        ti_da = tirtrend_unstacked

        if len(m) == 1:
            fp = fpath + 'trend_synop_SA_-40base1000_-65Ctrend_' + str(
                m[0]).zfill(2) + '.png'
        else:
            fp = fpath + 'trend_synop_SA_-40base1000_-65Ctrend_' + str(
                m[0]).zfill(2) + '-' + str(m[1]).zfill(2) + '.png'
        map = shear.salem.get_map()

        f = plt.figure(figsize=(15, 8), dpi=300)

        # transform their coordinates to the map reference system and plot the arrows
        xx, yy = map.grid.transform(shear.longitude.values,
                                    shear.latitude.values,
                                    crs=shear.salem.grid.proj)

        xx, yy = np.meshgrid(xx, yy)
        #Quiver only every 7th grid point
        u = u6trend_unstacked.values[1::2, 1::2]
        v = v6trend_unstacked.values[1::2, 1::2]

        #Quiver only every 7th grid point
        uu = u6_mean.values[1::2, 1::2]
        vv = v6_mean.values[1::2, 1::2]

        xx = xx[1::2, 1::2]
        yy = yy[1::2, 1::2]

        # Panel 1: 2m temperature trend with mean-T contours (Celsius).
        ax1 = f.add_subplot(221)
        map.set_data(t_da.values, interp='linear')  # interp='linear'

        map.set_contour((t2_mean.values - 273.15).astype(np.float64),
                        interp='linear',
                        colors='k',
                        linewidths=0.5,
                        levels=[20, 23, 26, 29, 32, 35])
        map.set_plot_params(levels=[
            -0.5, -0.4, -0.3, -0.2, -0.1, -0.05, -0.02, 0.02, 0.05, 0.1, 0.2,
            0.3, 0.4, 0.5
        ],
                            cmap='RdBu_r',
                            extend='both')  # levels=np.arange(-0.5,0.51,0.1),

        #map.set_contour((t2_mean.values).astype(np.float64), interp='linear', colors='k', linewidths=0.5, levels=np.linspace(800,925,8))
        #map.set_plot_params(levels=[-0.5,-0.4,-0.3,-0.2,-0.1,-0.05,-0.02, 0.02,0.05,0.1,0.2,0.3,0.4,0.5], cmap='RdBu_r', extend='both')  # levels=np.arange(-0.5,0.51,0.1),

        dic = map.visualize(ax=ax1,
                            title='2m temperature trend | contours: mean T',
                            cbar_title='K decade-1')
        qu = ax1.quiver(xx, yy, uu, vv, scale=80, width=0.002)

        qk = plt.quiverkey(qu,
                           0.4,
                           0.03,
                           4,
                           '4 m s$^{-1}$',
                           labelpos='E',
                           coordinates='figure')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        # Panel 2: specific-humidity trend with mean-q contours.
        ax2 = f.add_subplot(222)
        map.set_data(q_da.values, interp='linear')  # interp='linear'
        map.set_contour((q_mean.values).astype(np.float64),
                        interp='linear',
                        colors='k',
                        levels=[6, 8, 10, 12, 14, 16],
                        linewidths=0.5)
        map.set_plot_params(levels=[
            -0.4, -0.3, -0.2, -0.1, -0.05, -0.02, 0.02, 0.05, 0.1, 0.2, 0.3,
            0.4
        ],
                            cmap='RdBu',
                            extend='both')  # levels=np.arange(-0.5,0.51,0.1),

        dic = map.visualize(
            ax=ax2,
            title='800hPa Spec. humidity trend | contours: mean q',
            cbar_title='g kg-1 decade-1')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        # Panel 3: wind-shear trend with mean upper-level wind vectors.
        ax3 = f.add_subplot(223)
        map.set_data(s_da.values, interp='linear')  # interp='linear'
        map.set_contour(s_da.values,
                        interp='linear',
                        levels=np.arange(-7, 7, 8),
                        cmap='Blues')

        map.set_plot_params(levels=[
            -0.5, -0.4, -0.3, -0.2, -0.1, -0.05, -0.02, 0.02, 0.05, 0.1, 0.2,
            0.3, 0.4, 0.5
        ],
                            cmap='RdBu_r',
                            extend='both')  # levels=np.arange(-0.5,0.51,0.1)
        map.visualize(
            ax=ax3,
            title='800-500hPa wind shear trend, mean 500hPa wind vectors',
            cbar_title='m s-1 decade-1')
        qu = ax3.quiver(xx, yy, uu, vv, scale=80, width=0.002)

        qk = plt.quiverkey(qu,
                           0.4,
                           0.03,
                           4,
                           '4 m s$^{-1}$',
                           labelpos='E',
                           coordinates='figure')

        # Panel 4: cloud-cover change with mean-cover contours.
        ax4 = f.add_subplot(224)
        map.set_contour(tirm_mean.values,
                        interp='linear',
                        levels=[0.1, 0.5, 1, 2.5],
                        colors='k',
                        linewidths=0.5)

        # Mask zero-trend pixels as NaN so they are not colour-filled.
        ti_da[ti_da == 0] = np.nan
        map.set_data(ti_da)  #
        coord = [18, 25, -28, -20]
        geom = shpg.box(coord[0], coord[2], coord[1], coord[3])
        #map.set_geometry(geom, zorder=99, color='darkorange', linewidth=3, linestyle='--', alpha=0.3)

        map.set_plot_params(
            cmap='viridis', extend='both', levels=np.arange(
                10, 41,
                10))  # levels=np.arange(20,101,20)  #np.arange(20,101,20)
        dic = map.visualize(ax=ax4,
                            title='-65C cloud cover change | >1000km2 -40C',
                            cbar_title='$\%$ decade-1')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        plt.tight_layout()
        plt.savefig(fp)
        plt.close('all')

    # Persist the collected per-month cloud-cover fields.
    pkl.dump(
        dicm,
        open(
            cnst.network_data + 'data/CLOVER/saves/storm_frac_synop12UTC_SA.p',
            'wb'))

    pkl.dump(
        dicmean,
        open(
            cnst.network_data +
            'data/CLOVER/saves/storm_frac_mean_synop12UTC_SA.p', 'wb'))
Ejemplo n.º 8
0
def trend_all():
    """Plot monthly/seasonal 12UTC trend maps over a West-Africa box (ERA5).

    For each entry in `months`, computes Mann-Kendall decadal trends of a
    surface field, low-level humidity, TCWV, wind shear and -70C cloud
    cover via calc_trend, draws a 4-panel figure saved under fpath, and
    pickles the cloud-cover trend and mean fields per month.
    """

    srfc = cnst.ERA5_MONTHLY_SRFC_SYNOP #cnst.ERA_MONTHLY_SRFC_SYNOP
    pl = cnst.ERA5_MONTHLY_PL_SYNOP #cnst.ERA_MONTHLY_PL_SYNOP
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-70_monthly_mean_5000km2.nc'

    fpath = cnst.network_data + 'figs/CLOVER/months/ERA5_WA/'

    # Analysis domain: [lon_min, lon_max, lat_min, lat_max]
    box=[-18,30,0,25]#  [-18,40,0,25] #

    da = xr.open_dataset(pl) #xr.open_dataset(pl)
    #da = xr.decode_cf(da)
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2],box[3]))
    da2 = xr.open_dataset(srfc) #xr.open_dataset(srfc)
    #da2 = xr.decode_cf(da2)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2],box[3]))
    da3 = xr.open_dataarray(mcs)*100
    da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2],box[3]))

    lons = da.longitude
    lats = da.latitude


    # NOTE(review): despite its name, 'press' holds total column water
    # vapour ('tcwv'), not pressure.
    press = da2['tcwv']
    press = press[press['time.hour'] == 12]
    #press.values = press.values#*1000
    low_press = 925
    up_press = 650
    mid_press = 700

    # All fields below are restricted to the 12UTC synoptic hour.
    q = da['q'].sel(level=slice(low_press-20, low_press)).mean('level')
    q = q[q['time.hour']==12]
    # NOTE(review): 't2d' here holds surface sensible heat flux ('sshf'),
    # not 2m temperature — panel titles below still say temperature.
    t2d = da2['sshf']#['t2m']
    #t2d = da['t'].sel(level=slice(800, 850)).mean('level')
    t2d = t2d[t2d['time.hour']==12]

    # theta_low = u_met.theta_e(da.level.values, da['t'].sel(level=low_press), da['q'].sel(level=low_press))
    # theta_high = u_met.theta_e(da.level.values, da['t'].sel(level=mid_press), da['q'].sel(level=mid_press))
    #
    # theta_e = theta_low - theta_high

    u600 = da['u'].sel(level=slice(up_press-20, up_press)).mean('level')
    u600 = u600[u600['time.hour']==12]
    v600 = da['v'].sel(level=slice(up_press-20, up_press)).mean('level')
    v600 = v600[v600['time.hour']==12]

    # u_v_to_ws_wd appears to return a (speed, direction) pair; [0] below
    # picks the speed component — inferred from variable names, confirm.
    ws600 = u_met.u_v_to_ws_wd(u600, v600)

    u800 = da['u'].sel(level=slice(low_press-20, low_press)).mean('level')
    u800 = u800[u800['time.hour']==12]

    v800 = da['v'].sel(level=slice(low_press-20, low_press)).mean('level')
    v800 = v800[v800['time.hour']==12]

    # Shear between the upper and lower layers.
    shear_u = u600-u800 #u600-
    shear_v = v600-v800 # v600-

    ws_shear = u_met.u_v_to_ws_wd(shear_u.values, shear_v.values)

    ws_600 = t2d.copy(deep=True)
    ws_600.name = 'ws'

    ws_600.values = ws600[0]

    shear = t2d.copy(deep=True)
    shear.name = 'shear'
    shear.values = ws_shear[0]

    u6 = shear_u#u800
    v6 = shear_v#v800

    # Convert to g/kg (the colourbar below is labelled 'g kg-1 decade-1').
    q.values = q.values*1000

    grid = t2d.salem.grid.regrid(factor=1)
    t2 = t2d # grid.lookup_transform(t2d)
    tir = grid.lookup_transform(da3)  #t2d.salem.lookup_transform(da3['tir']) #

    grid = grid.to_dataset()
    tir = xr.DataArray(tir, coords=[da3['time'],  grid['y'], grid['x']], dims=['time',  'latitude','longitude'])

    # Single month plus three (start, end) seasonal ranges.
    months= [4, (3,5), (6,8), (9,11)]#[3,4,5,6,9,10,11]#,4,5,6,9,10,11#,4,5,6,9,10,11,(3,5), (9,11)]#, 10,5,9]#[(12,2)]#[1,2,3,4,5,6,7,8,9,10,11,12]# #,2,3,11,12]#[(12,2)]#[1,2,3,4,5,6,7,8,9,10,11,12]# #,2,3,11,12]

    dicm = {}
    dicmean = {}

    for m in months:
        method = 'mk'  # Mann-Kendall trend

        if type(m)==int:
            m = [m]

        sig = True

        t2trend, t2mean = calc_trend(t2, m,  method=method, sig=sig,hour=12, wilks=False) #hour=12,
        t2_mean = t2mean.mean(axis=0)

        tirtrend, tirmean = calc_trend(tir, m, method=method, sig=sig, wilks=False)

        tirm_mean = tirmean.mean(axis=0)

        qtrend, qmean = calc_trend(q, m, method=method, sig=sig,hour=12, wilks=False) #hour=12,
        q_mean = qmean.mean(axis=0)

        sheartrend, shearmean = calc_trend(shear, m, method=method, sig=sig,hour=12, wilks=False) #hour=12,
        shear_mean = shearmean.mean(axis=0)

        presstrend, pressmean = calc_trend(press, m, method=method, sig=sig,hour=12, wilks=False) #hour=12,
        press_mean = pressmean.mean(axis=0)

        u6trend, u6mean = calc_trend(u6, m,  method=method, sig=sig, hour=12,wilks=False) #hour=12,
        u6_mean = u6mean.mean(axis=0)
        v6trend, v6mean = calc_trend(v6, m, method=method, sig=sig, hour=12,wilks=False) #hour=12,
        v6_mean = v6mean.mean(axis=0)

        # thetatrend, thetamean = calc_trend(theta_e, m, method=method, sig=sig, hour=12,wilks=False) #hour=12,
        # theta_mean = thetamean.mean(axis=0)

        # Per-year trends scaled to per-decade values.
        t2trend_unstacked = t2trend*10. # warming over decade
        qtrend_unstacked = qtrend * 10.  # warming over decade
        sheartrend_unstacked = sheartrend * 10.  # warming over decade
        u6trend_unstacked = u6trend * 10
        v6trend_unstacked = v6trend * 10
        presstrend_unstacked = presstrend * 10
        # thetatrend_unstacked = thetatrend * 10

        # Cloud-cover trend expressed as percent of the monthly mean per decade.
        tirtrend_unstacked = ((tirtrend.values)*10. / tirm_mean.values) * 100.
        #ipdb.set_trace()
        tirtrend_out = xr.DataArray(tirtrend_unstacked, coords=[grid['y'], grid['x']], dims=['latitude','longitude'])
        tirtrend_out.name = 'tir'
        #tirmean_out = xr.DataArray(tirm_mean, coords=[grid['y'], grid['x']], dims=['latitude','longitude'])

        # Collect per-month cloud-cover fields, keyed by the first month.
        dicm[m[0]] = tirtrend_out
        dicmean[m[0]] = tirm_mean

        t_da = t2trend_unstacked
        q_da = qtrend_unstacked
        s_da = sheartrend_unstacked
        ti_da = tirtrend_out
        tcwv_da = presstrend_unstacked
        # theta_da  = thetatrend_unstacked

        if len(m) == 1:
            fp = fpath + 'use/ERA5_trend_synop_WA_sig_poly_tcwv_1991_skt_'+str(m[0]).zfill(2)+'.png'
        else:
            fp = fpath + 'use/ERA5_trend_synop_WA_sig_poly_tcwv_1991_skt_' + str(m[0]).zfill(2) +'-'+ str(m[1]).zfill(2) + '.png'
        map = shear.salem.get_map()
        ti_da = t2d.salem.transform(ti_da)

        f = plt.figure(figsize=(15,8), dpi=300)

        # transform their coordinates to the map reference system and plot the arrows
        xx, yy = map.grid.transform(shear.longitude.values, shear.latitude.values,
                                    crs=shear.salem.grid.proj)

        xx, yy = np.meshgrid(xx, yy)
        #Quiver only every 7th grid point
        u = u6trend_unstacked.values[1::2, 1::2]
        v = v6trend_unstacked.values[1::2, 1::2]

        #Quiver only every 7th grid point
        uu = u6_mean.values[1::2, 1::2]
        vv = v6_mean.values[1::2, 1::2]

        xx = xx[1::2, 1::2]
        yy = yy[1::2, 1::2]

        # NOTE(review): 'pdic' is built here but never used afterwards.
        pdic = {
            'tlin' : (t2_mean.values-273.15).astype(np.float64),
            'tmean' : (t2_mean.values-273.15).astype(np.float64),
            'qmean' : (q_mean.values).astype(np.float64),
            'qlin'  : q_da.values,
            'shearlin' : s_da.values,
            'u' : u,
            'v' : v,
            'xx' : xx,
            'yy' : yy,
            'tirmean' : tirm_mean,


        }

        # NOTE(review): this dump inside the loop duplicates the one after
        # the loop (same path); it rewrites the partial dict every month.
        pkl.dump(dicm,
                 open(cnst.network_data + 'data/CLOVER/saves/storm_frac_synop12UTC_WA.p',
                      'wb'))

        # Panel 1: surface-field trend with mean contours (Celsius offset).
        ax1 = f.add_subplot(221)
        map.set_data(t_da.values, interp='linear')  # interp='linear'

        map.set_contour((t2_mean.values-273.15).astype(np.float64), interp='linear', colors='k', linewidths=0.5, levels=[20,23,26,29,32,35])
        map.set_plot_params(levels=[-0.5,-0.4,-0.3,-0.2,0.2,0.3,0.4,0.5], cmap='RdBu_r', extend='both')  # levels=np.arange(-0.5,0.51,0.1),

        #map.set_contour((t2_mean.values).astype(np.float64), interp='linear', colors='k', linewidths=0.5, levels=np.linspace(800,925,8))
        #map.set_plot_params(levels=[-0.5,-0.4,-0.3,-0.2,-0.1,-0.05,-0.02, 0.02,0.05,0.1,0.2,0.3,0.4,0.5], cmap='RdBu_r', extend='both')  # levels=np.arange(-0.5,0.51,0.1),

        dic = map.visualize(ax=ax1, title='2m temperature trend | contours: mean T', cbar_title='K decade-1')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        # Panel 2: TCWV trend with mean-TCWV contours.
        ax2 = f.add_subplot(222)
        map.set_data(tcwv_da.values,interp='linear')  # interp='linear'
        map.set_contour((press_mean.values).astype(np.float64),interp='linear', colors='k', levels=[20,30,40,50,60], linewidths=0.5) #[6,8,10,12,14,16]
        map.set_plot_params(levels=[-0.8,-0.6,-0.4,-0.2,0.2,0.4, 0.6,0.8], cmap='RdBu', extend='both')  # levels=np.arange(-0.5,0.51,0.1), [-0.6,-0.4,-0.2,0.2,0.4,0.6]

        dic = map.visualize(ax=ax2, title='925hPa Spec. humidity trend | contours: mean q', cbar_title='g kg-1 decade-1')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')


        # Panel 3: wind-shear trend with shear-trend wind vectors.
        ax3 = f.add_subplot(223)
        map.set_data(s_da.values, interp='linear')  # interp='linear'
        map.set_contour(s_da.values, interp='linear', levels=np.arange(-15,-8,8), colors='k')

        map.set_plot_params(levels=[-0.8,-0.6,-0.4,-0.2,0.2,0.4, 0.6,0.8], cmap='RdBu_r', extend='both')  # levels=np.arange(-0.5,0.51,0.1)
        map.visualize(ax=ax3, title='925-650hPa wind shear trend, mean 650hPa wind vectors', cbar_title='m s-1 decade-1')
        qu = ax3.quiver(xx, yy, u, v, scale=60, width=0.002)

        qk = plt.quiverkey(qu, 0.4, 0.03, 1, '1 m s$^{-1}$',
                           labelpos='E', coordinates='figure')

        # Panel 4: cloud-cover change with mean-cover contours.
        ax4 = f.add_subplot(224)
        map.set_contour((tirm_mean), interp='linear', levels=[0.1,0.5,1,2.5], colors='k', linewidths=0.5)
        #.values).astype(np.float64)

        # Mask zero-trend pixels as NaN so they are not colour-filled.
        ti_da.values[ti_da.values==0] = np.nan
        map.set_data(ti_da)  #
        coord = [18, 25, -28, -20]
        geom = shpg.box(coord[0], coord[2], coord[1], coord[3])
        #map.set_geometry(geom, zorder=99, color='darkorange', linewidth=3, linestyle='--', alpha=0.3)

        map.set_plot_params(cmap='viridis', extend='both', levels=np.arange(10,51,10))  # levels=np.arange(20,101,20)  #np.arange(20,101,20)
        dic = map.visualize(ax=ax4, title='-70C cloud cover change | >5000km2', cbar_title='$\%$ decade-1', addcbar=True)
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        plt.tight_layout()
        plt.savefig(fp)
        plt.close('all')

    # Persist the collected per-month cloud-cover fields.
    pkl.dump(dicm,
             open(cnst.network_data + 'data/CLOVER/saves/storm_frac_synop12UTC_WA.p',
                  'wb'))

    pkl.dump(dicmean,
                 open(cnst.network_data + 'data/CLOVER/saves/storm_frac_mean_synop12UTC_WA.p',
                      'wb'))
Ejemplo n.º 9
0
def trend_all() -> None:
    """Compute per-month decadal trends of low-level temperature, specific
    humidity, zonal wind shear and cold-cloud cover over an Africa-wide box,
    plot them as 2x2 map panels and pickle the cloud trend/mean fields.

    Inputs (paths built from ``cnst``): ERA monthly synoptic pressure-level
    and surface files plus a GRIDSAT monthly cloud-count file.  Outputs: one
    PNG per month under ``figs/CLOVER/months/`` and two pickle files holding
    the cloud-cover trend and mean maps keyed by month.  Relies on the
    module-level helper ``calc_trend`` and on ``u_darrays``, ``xr``, ``np``,
    ``plt``, ``pkl``.  Returns nothing.
    """

    srfc = cnst.ERA_MONTHLY_SRFC_SYNOP
    pl = cnst.ERA_MONTHLY_PL_SYNOP
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-60_monthly_count_-50base.nc'

    fpath = cnst.network_data + 'figs/CLOVER/months/'

    # Analysis domain [lon_min, lon_max, lat_min, lat_max].
    box = [-18, 55, -35, 35]  #  [-18,40,0,25] #

    da = xr.open_dataset(pl)
    # flip_lat presumably reorders latitude to ascending so the slices below
    # select correctly -- confirm against u_darrays.
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(box[0], box[1]),
                latitude=slice(box[2], box[3]))
    da2 = xr.open_dataset(srfc)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(box[0], box[1]),
                  latitude=slice(box[2], box[3]))
    da3 = xr.open_dataset(mcs)
    da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2], box[3]))

    lons = da.longitude
    lats = da.latitude

    # 12 UTC layer means: 800-850hPa T/q/u and 600-650hPa u for the shear.
    q = da['q'].sel(level=slice(800, 850)).mean('level')

    q = q[q['time.hour'] == 12]
    t2d = da['t'].sel(level=slice(800, 850)).mean('level')
    t2d = t2d[t2d['time.hour'] == 12]
    u925 = da['u'].sel(level=slice(800, 850)).mean('level')
    u925 = u925[u925['time.hour'] == 12]
    u600 = da['u'].sel(level=slice(600, 650)).mean('level')
    u600 = u600[u600['time.hour'] == 12]

    shear = u600 - u925

    q.values = q.values * 1000  # presumably kg/kg -> g/kg; confirm ERA units

    # Coarsen the ERA grid and aggregate every field onto it.
    grid = t2d.salem.grid.regrid(factor=0.5)
    t2 = grid.lookup_transform(t2d)
    tir = grid.lookup_transform(da3['tir'])
    q = grid.lookup_transform(q)
    shear = grid.lookup_transform(shear)

    # tir = t2d.salem.lookup_transform(da3['tir'])
    # t2 = t2d
    # #tir = da3['tir']
    # q = q
    # shear = shear

    grid = grid.to_dataset()

    # lookup_transform returns plain arrays; re-wrap them on the coarse grid.
    t2 = xr.DataArray(t2,
                      coords=[t2d['time'], grid['y'], grid['x']],
                      dims=['time', 'latitude', 'longitude'])
    q = xr.DataArray(q,
                     coords=[t2d['time'], grid['y'], grid['x']],
                     dims=['time', 'latitude', 'longitude'])
    tir = xr.DataArray(tir,
                       coords=[da3['time'], grid['y'], grid['x']],
                       dims=['time', 'latitude', 'longitude'])
    shear = xr.DataArray(shear,
                         coords=[t2d['time'], grid['y'], grid['x']],
                         dims=['time', 'latitude', 'longitude'])

    months = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]

    # month -> cloud-cover trend map / cloud-cover mean map (pickled below).
    dicm = {}
    dicmean = {}

    for m in months:
        method = 'mk'  # Mann-Kendall trend method (interpreted by calc_trend)

        # NOTE(review): len([m]) is always 1, so this branch always runs;
        # presumably meant to wrap a scalar month in a list (harmless here
        # since every m is an int).
        if len([m]) == 1:
            m = [m]

        sig = True

        t2trend, t2mean = calc_trend(t2,
                                     m,
                                     hour=12,
                                     method=method,
                                     sig=sig,
                                     wilks=False)
        t2_mean = t2mean.mean(axis=0)

        tirtrend, tirmean = calc_trend(tir,
                                       m,
                                       method=method,
                                       sig=sig,
                                       wilks=False)

        tirm_mean = tirmean.mean(axis=0)

        qtrend, qmean = calc_trend(q,
                                   m,
                                   hour=12,
                                   method=method,
                                   sig=sig,
                                   wilks=False)
        q_mean = qmean.mean(axis=0)

        sheartrend, shearmean = calc_trend(shear,
                                           m,
                                           hour=12,
                                           method=method,
                                           sig=sig,
                                           wilks=False)
        shear_mean = shearmean.mean(axis=0)

        # Scale per-year trends to per-decade; the cloud trend is expressed
        # as percent of the monthly mean per decade.
        t2trend_unstacked = t2trend * 10.  # warming over decade
        qtrend_unstacked = qtrend * 10.  # warming over decade
        sheartrend_unstacked = sheartrend * 10.  # warming over decade
        tirtrend_unstacked = (
            (tirtrend.values) * 10. / tirm_mean.values) * 100.

        tirtrend_out = xr.DataArray(tirtrend_unstacked,
                                    coords=[grid['y'], grid['x']],
                                    dims=['latitude', 'longitude'])
        tirmean_out = xr.DataArray(tirm_mean,
                                   coords=[grid['y'], grid['x']],
                                   dims=['latitude', 'longitude'])

        dicm[m[0]] = tirtrend_out
        dicmean[m[0]] = tirmean_out

        t_da = t2trend_unstacked
        q_da = qtrend_unstacked
        s_da = sheartrend_unstacked
        ti_da = tirtrend_unstacked

        fp = fpath + 'trend_mk_-60C_synop_-50base' + str(
            m[0]).zfill(2) + '_sig.png'
        # NOTE(review): 'map' shadows the builtin; kept as-is.
        map = shear.salem.get_map()

        # f = plt.figure(figsize=(8, 5), dpi=300)
        # ax1 = f.add_subplot(221)
        #
        # # map.set_shapefile(rivers=True)
        # map.set_plot_params()
        #
        # map.set_data(t_da, interp='linear')
        # map.set_plot_params(levels=np.linspace(-0.5,0.5,10), cmap='RdBu_r', extend='both')
        # map.visualize(ax=ax1, title='t2')
        #
        # ax2 = f.add_subplot(222)
        # map.set_data(q_da, interp='linear')
        # map.set_plot_params(levels=np.linspace(-0.5,0.5,10), cmap='RdBu', extend='both')
        # map.visualize(ax=ax2, title='q')
        #
        # ax3 = f.add_subplot(223)
        # map.set_data(s_da, interp='linear')
        # map.set_plot_params(levels=np.linspace(-1,1.1,10), cmap='RdBu_r', extend='both')
        # map.visualize(ax=ax3, title='u-shear')
        #
        # ax4 = f.add_subplot(224)
        # map.set_data(ti_da)
        # map.set_plot_params(cmap='Blues', extend='both', levels=np.arange(20,101,20)) #levels=np.arange(20,101,20)
        # map.visualize(ax=ax4, title='-70C frequency')
        #
        # plt.tight_layout()
        # plt.savefig(fp)
        # plt.close('all')

        # 2x2 panel: T trend, q trend, shear trend, cloud-cover change.
        # Note the map object is stateful: set_data/set_contour/
        # set_plot_params must precede each visualize call in this order.
        f = plt.figure(figsize=(13, 7), dpi=300)
        ax1 = f.add_subplot(221)
        # map.set_shapefile(rivers=True)
        # bla = ma.masked_invalid(tcorr['r'].values)

        map.set_data(t_da, interp='linear')  # interp='linear'
        contours = map.set_contour(t2_mean - 273.15,
                                   interp='linear',
                                   levels=np.arange(24, 37, 4),
                                   cmap='inferno')

        #plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        map.set_plot_params(levels=np.linspace(-0.5, 0.5, 10),
                            cmap='RdBu_r',
                            extend='both')  # levels=np.arange(-0.5,0.51,0.1),
        map.visualize(ax=ax1, title='925hP temperature')

        ax2 = f.add_subplot(222)
        map.set_data(q_da, interp='linear')  # interp='linear'
        map.set_contour(q_mean,
                        interp='linear',
                        levels=np.arange(5, 19, 3),
                        cmap='inferno')

        map.set_plot_params(levels=np.linspace(-0.5, 0.5, 10),
                            cmap='RdBu',
                            extend='both')  # levels=np.arange(-0.5,0.51,0.1),
        map.visualize(ax=ax2, title='925hPa Spec. humidity')

        ax3 = f.add_subplot(223)
        map.set_data(s_da, interp='linear')  # interp='linear'
        map.set_contour(shear_mean,
                        interp='linear',
                        levels=np.arange(-10, 1, 3),
                        cmap='inferno')
        map.set_plot_params(levels=np.linspace(-1, 1, 10),
                            cmap='RdBu_r',
                            extend='both')  # levels=np.arange(-0.5,0.51,0.1)
        map.visualize(ax=ax3, title='600-925hPa Zonal wind shear')

        ax4 = f.add_subplot(224)
        map.set_contour(tirm_mean,
                        interp='linear',
                        levels=[0.1, 0.5, 1, 2.5],
                        cmap='inferno')
        # Hide zero-trend points (ti_da is a float numpy array here).
        ti_da[ti_da == 0] = np.nan
        map.set_data(ti_da)  #

        map.set_plot_params(
            cmap='viridis', extend='both', levels=np.arange(
                10, 51,
                10))  # levels=np.arange(20,101,20)  #np.arange(20,101,20)
        map.visualize(ax=ax4,
                      title='-70C cloud cover change',
                      cbar_title='$\%$ decade-1')

        plt.tight_layout()
        plt.savefig(fp)
        plt.close('all')

    # Persist the cloud-cover trend/mean maps for reuse by other scripts.
    pkl.dump(
        dicm,
        open(cnst.network_data + 'data/CLOVER/saves/storm_frac_synop12UTC.p',
             'wb'))

    pkl.dump(
        dicmean,
        open(
            cnst.network_data +
            'data/CLOVER/saves/storm_frac_mean_synop12UTC.p', 'wb'))
Ejemplo n.º 10
0
def corr_all() -> None:
    """Correlate year-to-year changes of cold-cloud counts with changes of
    925hPa temperature, 850-925hPa humidity and 600-925hPa zonal wind shear
    at every grid point over West Africa, and plot the significant
    correlations for the selected months.

    Reads ERA monthly synoptic files and a GRIDSAT -70C monthly count file
    (paths from ``cnst``); writes one PNG per processed month to ``fpath``.
    Uses module-level ``u_darrays``, ``stats`` (scipy), ``xr``, ``np`` and
    ``plt``.  Returns nothing.
    """
    srfc = cnst.ERA_MONTHLY_SRFC_SYNOP
    pl = cnst.ERA_MONTHLY_PL_SYNOP
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-70_monthly_count.nc'

    fpath = '/users/global/cornkle/figs/CLOVER/months/'

    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(-18, 40),
                latitude=slice(0, 25))  #latitude=slice(36, -37))
    da2 = xr.open_dataset(srfc)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(-18, 40),
                  latitude=slice(0, 25))  #latitude=slice(36, -37))
    da3 = xr.open_dataset(mcs)
    da3 = da3.sel(lon=slice(-18, 40), lat=slice(0, 25))

    lons = da.longitude
    lats = da.latitude

    q = da['q'].sel(level=slice(850, 925)).mean(dim='level')
    #q = da2['tcwv']
    t2 = da['t'].sel(level=925)

    u925 = da['u'].sel(level=slice(850, 925)).mean(dim='level')
    u600 = da['u'].sel(level=slice(600, 650)).mean(dim='level')

    shear = u600 - u925

    q.values = q.values * 1000  # presumably kg/kg -> g/kg; confirm ERA units

    # Aggregate the cloud counts onto the ERA grid.
    tir = da3['tir']
    tir = t2.salem.lookup_transform(tir)

    months = np.arange(1, 13)

    #months=[6,7]

    def array_juggling(data, month, hour=None):
        """Return (year-on-year difference series, yearly means) for one
        calendar month, restricted to 1983-2013 and excluding 2003."""

        m = month

        if hour != None:
            data = data[(data['time.month'] == m) & (data['time.hour'] == hour)
                        & (data['time.year'] >= 1983) &
                        (data['time.year'] <= 2013) &
                        (data['time.year'] != 2003)]
        else:
            data = data[(data['time.month'] == m) & (data['time.year'] >= 1983)
                        & (data['time.year'] <= 2013) &
                        (data['time.year'] != 2003)]
        data_years = data.groupby('time.year').mean(axis=0)

        # Change from one year to the next (year t minus year t-1).
        diff = xr.DataArray(
            data_years.values[1::, :, :] - data_years.values[0:-1, :, :],
            coords=[data_years.year[1::], data.latitude, data.longitude],
            dims=['year', 'latitude', 'longitude'])

        # unstack back to lat lon coordinates
        return diff, data_years

    # NOTE(review): overrides the full-year list above -- only Sep/Oct run.
    months = [9, 10]
    for m in months:

        t2diff, t2year = array_juggling(t2, m, hour=12)
        qdiff, qyear = array_juggling(q, m, hour=12)
        shdiff, sheyear = array_juggling(shear, m, hour=12)
        tirdiff, tiryear = array_juggling(tir, m)

        def corr(a, b):
            """Pointwise Pearson correlation of two (year, lat, lon) arrays;
            returns a Dataset with 'r' and 'pval' maps (NaN elsewhere)."""
            ds = xr.Dataset()
            ds['pval'] = a.copy(deep=True).sum('year') * np.nan
            ds['r'] = a.copy(deep=True).sum('year') * np.nan

            for lat in a.latitude.values:
                for lon in a.longitude.values:
                    aa = a.sel(latitude=lat, longitude=lon)
                    bb = b.sel(latitude=lat, longitude=lon)

                    # NOTE(review): pearsonr is evaluated before the
                    # zero-count guard below; its result is simply discarded
                    # for mostly-zero series.
                    r, p = stats.pearsonr(aa.values, bb.values)

                    if np.nansum(aa.values == 0) >= 10:

                        p = np.nan
                        r = np.nan

                    ds['r'].loc[{'latitude': lat, 'longitude': lon}] = r
                    ds['pval'].loc[{'latitude': lat, 'longitude': lon}] = p

            return ds

        qcorr = corr(tirdiff, qdiff)
        shearcorr = corr(tirdiff, shdiff)
        tcorr = corr(tirdiff, t2diff)

        #pthresh = us.fdr_threshold(qcorr['pval'].values[np.isfinite(qcorr['pval'].values)], alpha=0.05)
        #print(pthresh)
        # Mask correlations that are not significant at the 5% level.
        pthresh = 0.05
        qcorr['r'].values[qcorr['pval'].values > pthresh] = np.nan

        #pthresh = us.fdr_threshold(shearcorr['pval'].values[np.isfinite(shearcorr['pval'].values)], alpha=0.05)
        #print(pthresh)
        shearcorr['r'].values[shearcorr['pval'].values > pthresh] = np.nan

        # pthresh = us.fdr_threshold(tcorr['pval'].values[np.isfinite(tcorr['pval'].values)], alpha=0.05)
        #print(pthresh)
        tcorr['r'].values[tcorr['pval'].values > pthresh] = np.nan

        fp = fpath + 'corr_synop_' + str(m).zfill(2) + '.png'
        # NOTE(review): 'map' shadows the builtin; kept as-is.
        map = shear.salem.get_map()

        f = plt.figure(figsize=(8, 5), dpi=300)
        ax1 = f.add_subplot(221)

        # map.set_shapefile(rivers=True)
        map.set_plot_params()

        map.set_data(tcorr['r'])  # interp='linear'
        map.set_contour(t2year.mean('year') - 273.15)
        map.set_plot_params(levels=np.arange(-0.5, 0.51, 0.1),
                            cmap='RdBu',
                            extend='both')
        map.visualize(ax=ax1, title='t2')

        ax2 = f.add_subplot(222)
        map.set_data(qcorr['r'])  # interp='linear'
        map.set_contour(qyear.mean('year'))
        map.set_plot_params(levels=np.arange(-0.5, 0.51, 0.1),
                            cmap='RdBu',
                            extend='both')
        map.visualize(ax=ax2, title='q')

        ax3 = f.add_subplot(223)
        map.set_data(shearcorr['r'])  # interp='linear'
        map.set_contour(sheyear.mean('year'))
        map.set_plot_params(levels=np.arange(-0.5, 0.51, 0.1),
                            cmap='RdBu',
                            extend='both')
        map.visualize(ax=ax3, title='u-shear')

        ax4 = f.add_subplot(224)
        # NOTE(review): ax4 re-plots shearcorr['r'] although its title says
        # '-70C frequency' -- looks like a copy-paste leftover; confirm.
        map.set_data(shearcorr['r'])  # interp='linear'
        map.set_plot_params(cmap='Blues',
                            extend='both',
                            levels=np.arange(20, 101,
                                             20))  #levels=np.arange(20,101,20)
        map.visualize(ax=ax4, title='-70C frequency')

        plt.savefig(fp)
        plt.close('all')
Ejemplo n.º 11
0
def trend_all() -> None:
    """Plot monthly ERA5 trend maps for the Huaraz (Peru) region: 550hPa
    temperature trend with its mean as contours, and the 200hPa wind trend
    with mean wind vectors plus per-decade marker positions from
    ``get_trend``.

    Reads ERA5 monthly synoptic pressure-level files (12 UTC steps only) and
    a static geopotential file; writes one PNG per month under
    ``figs/HUARAZ/monthly/``.  Relies on module-level helpers ``get_trend``,
    ``u_darrays`` and ``u_met``.  Returns nothing.
    """

    pl = cnst.ERA5_MONTHLY_PL_SYNOP_HU  #cnst.ERA_MONTHLY_PL_SYNOP
    #mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-70_monthly_mean_5000km2.nc'

    fpath = cnst.network_data + 'figs/HUARAZ/monthly/'

    # Domain [lon_min, lon_max, lat_min, lat_max] over South America.
    box = [-82, -40, -28, 4]  #  [-18,40,0,25] #

    topo = xr.open_dataset(
        '/media/ck/Elements/SouthAmerica/ERA5/monthly/ERA5_static_synop_0.7deg.nc'
    )
    topo = u_darrays.flip_lat(topo)
    z = topo['z'].isel(number=0, time=0)
    z = z.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2],
                                                              box[3])).values

    da = xr.open_mfdataset(pl + '/*.nc')  #xr.open_dataset(pl)
    #da = xr.decode_cf(da)
    da = u_darrays.flip_lat(da)
    # Keep only 12 UTC time steps within the box.
    da = da.sel(longitude=slice(box[0], box[1]),
                latitude=slice(box[2], box[3]),
                time=(da['time.hour'] == 12)).load()
    #ipdb.set_trace()
    #da3 = xr.open_dataarray(mcs)*100
    #da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2],box[3]))

    lons = da.longitude
    lats = da.latitude

    #ipdb.set_trace()

    # Pressure levels (hPa) used throughout.
    low_press = 850
    up_press = 200
    mid_press = 550

    gp = da['z'].mean('time') / 9.81  # geopotential -> geopotential height (m)

    gp_high = da['z'].sel(level=up_press) / 9.81
    w_mid = da['w'].sel(level=mid_press)

    # Masks for points where the pressure surface sits high enough to be
    # above the Andes terrain (thresholds in metres).
    low_z = gp.sel(level=low_press) > 1400
    mid_z = gp.sel(level=mid_press) > 5500

    #ipdb.set_trace()

    tlow = da['t'].sel(level=low_press).where(low_z) - 273.15
    qlow = da['q'].sel(level=low_press).where(low_z) * 1000

    tmid = da['t'].sel(level=mid_press) - 273.15  #.where(mid_z)-273.15
    qmid = da['q'].sel(level=mid_press) * 1000  #.where(mid_z)*1000

    # Equivalent potential temperature difference (low minus mid level);
    # computed but not used further below.
    theta_low = u_met.theta_e(low_press, tlow, qlow)
    theta_high = u_met.theta_e(mid_press, tmid, qmid)

    theta_e = theta_low - theta_high

    u600 = da['u'].sel(level=up_press)  #.where(mid_z)
    v600 = da['v'].sel(level=up_press)  #.where(mid_z)

    u600.name = 'u200'

    ws600 = u_met.u_v_to_ws_wd(u600, v600)

    u800 = da['u'].sel(level=mid_press)  # 200-500 shear
    v800 = da['v'].sel(level=mid_press)

    shear_u = u600 - u800
    shear_v = v600 - v800

    ws_shear = u_met.u_v_to_ws_wd(shear_u.values, shear_v.values)

    ws_600 = u600.copy(deep=True)
    ws_600.name = 'ws'

    # u_v_to_ws_wd presumably returns (speed, direction); take the speed.
    ws_600.values = ws600[0]

    shear = u600.copy(deep=True)
    shear.name = 'shear'
    shear.values = ws_shear[0]

    # Variable keys and matching arrays handed to get_trend below.
    # NOTE(review): 'vars' shadows the builtin; kept as-is.
    vars = [
        't600', 'q600', 'shear', 'q550', 'u200', 'v200', 'u550', 'v550', 'gp',
        'w550'
    ]
    data = [tmid, qmid, shear, qmid, u600, v600, u800, v800, gp_high, w_mid]

    months = [
        11
    ]  #,6]#,2,3,4,5,6,7,8,9,10,11,12]#[3,4,5,6,9,10,11]#,4,5,6,9,10,11#,4,5,6,9,10,11,(3,5), (9,11)]#, 10,5,9]#[(12,2)]#[1,2,3,4,5,6,7,8,9,10,11,12]# #,2,3,11,12]#[(12,2)]#[1,2,3,4,5,6,7,8,9,10,11,12]# #,2,3,11,12]

    #ipdb.set_trace()
    for m in months:

        dic = {}
        for v in vars:
            dic[v] = 0

        if type(m) == int:
            m = [m]

        # Per usage below, get_trend returns an indexable with
        # [0]=trend, [1]=mean, [2]=extra marker data -- confirm in helper.
        for v, dat in zip(vars, data):
            print('Doing ', v)
            dic[v] = get_trend(dat, m, sig=True, wilks=False, method='mk')

        if len(m) == 1:
            fp = fpath + 'low_ERA5_trend_synop_HU_poly_quatro_' + str(
                m[0]).zfill(2) + '.png'
        else:
            fp = fpath + 'low_ERA5_trend_synop_HU_poly_quatro_' + str(
                m[0]).zfill(2) + '-' + str(m[1]).zfill(2) + '.png'
        #ipdb.set_trace()

        # NOTE(review): 'map' shadows the builtin; kept as-is.
        map = shear.salem.get_map()
        # ti_da = t2d.salem.transform(ti_da)

        f = plt.figure(figsize=(12, 5), dpi=300)

        # transform their coordinates to the map reference system and plot the arrows
        xo, yo = map.grid.transform(shear.longitude.values,
                                    shear.latitude.values,
                                    crs=shear.salem.grid.proj)

        # Huaraz location in map coordinates.
        xH, yH = map.grid.transform(-77.52, -9.52, crs=shear.salem.grid.proj)

        xx, yy = np.meshgrid(xo, yo)

        ss = 3  # quiver subsampling stride
        #Quiver only every 7th grid point
        u = (dic['u200'][0]).values[1::ss, 1::ss]  # 200hpa
        v = (dic['v200'][0]).values[1::ss, 1::ss]

        #Quiver only every 7th grid point
        uu = (dic['u200'][1]).values[1::ss, 1::ss]  # 200mean
        vv = (dic['v200'][1]).values[1::ss, 1::ss]

        u500 = (dic['u550'][0]).values[1::ss, 1::ss]
        v500 = (dic['v550'][0]).values[1::ss, 1::ss]

        #Quiver only every 7th grid point
        uu500 = (dic['u550'][1]).values[1::ss, 1::ss]
        vv500 = (dic['v550'][1]).values[1::ss, 1::ss]

        xx = xx[1::ss, 1::ss]
        yy = yy[1::ss, 1::ss]

        # Left panel: 550hPa temperature trend (shading) + mean (contours).
        ax1 = f.add_subplot(121)
        map.set_data((dic['t600'][0]).values,
                     interp='linear')  # interp='linear'

        map.set_contour(((dic['t600'][1]).values).astype(np.float64),
                        interp='linear',
                        colors='k',
                        linewidths=0.5)
        map.set_plot_params(
            cmap='RdBu_r',
            extend='both',
            levels=[-0.5, -0.4, -0.3, -0.2, 0.2, 0.3, 0.4, 0.5]
        )  #levels=[-0.5,-0.4,-0.3,-0.2,0.2,0.3,0.4,0.5], ,  levels=np.arange(-0.5,0.51,0.1),
        # Mark Huaraz (white dot over black outline).
        ax1.plot(xH, yH, 'ko', markersize=7)
        ax1.plot(xH, yH, 'wo', markersize=5)
        ax1.text(xH, yH + 10, 'Huaraz', fontsize=11)
        #map.set_contour((t2_mean.values).astype(np.float64), interp='linear', colors='k', linewidths=0.5, levels=np.linspace(800,925,8))
        #map.set_plot_params(levels=[-0.5,-0.4,-0.3,-0.2,-0.1,-0.05,-0.02, 0.02,0.05,0.1,0.2,0.3,0.4,0.5], cmap='RdBu_r', extend='both')  # levels=np.arange(-0.5,0.51,0.1),
        # Per-decade geopotential-maximum markers ("Bolivian High centre").
        cc = ['b', 'r', 'k']
        tags = ['1985-1994', '1995-2004', '2005-2014']
        for ids, pp in enumerate((dic['gp'][2])):
            xh, yh = map.grid.transform(pp[0],
                                        pp[1],
                                        crs=shear.salem.grid.proj)
            # First two markers are nudged so overlapping points stay visible.
            if ids < 2:
                ax1.plot(xh - 80,
                         yh - 130,
                         color=cc[ids],
                         marker='x',
                         markersize=10,
                         label=tags[ids])
            else:
                ax1.plot(xh,
                         yh,
                         color=cc[ids],
                         marker='x',
                         markersize=10,
                         label=tags[ids])
            #ax1.text(xh + 10, yh, tags[ids], fontsize=9)
        plt.legend(loc='upper left')
        pdic = map.visualize(
            ax=ax1,
            title='550hpa T trend (shading) and mean (contour)',
            cbar_title='K decade-1')
        contours = pdic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        # Right panel: 200hPa wind trend (shading) + mean wind vectors.
        ax3 = f.add_subplot(122)
        map.set_data((dic['u200'][0]).values,
                     interp='linear')  # interp='linear'
        #map.set_contour(u6_mean.values, interp='linear', colors='k')
        map.set_contour()
        ax3.plot(xH, yH, 'ko', markersize=7)
        ax3.plot(xH, yH, 'wo', markersize=5)
        map.set_plot_params(
            levels=[-3, -2.5, -2, -1.5, -1, -0.5, 0.5, 1, 1.5, 2, 2.5, 3],
            cmap='RdBu_r',
            extend='both')  # levels=np.arange(-0.5,0.51,0.1)
        map.visualize(ax=ax3,
                      title='200hPa wind trend (shading) and mean (vectors)',
                      cbar_title='m s-1 decade-1')
        qu = ax3.quiver(xx, yy, uu, vv, scale=100, width=0.002)

        # Per-decade easterly-area markers from the u200 trend extras.
        cc = ['b', 'r', 'k']
        tags = ['1985-1994', '1995-2004', '2005-2014']
        for ii in range(3):
            use = (dic['u200'][2]).values[ii, :]
            for id, xpos in enumerate(xo):
                #ipdb.set_trace()
                ax3.plot(xpos,
                         yo[use[id]],
                         color=cc[ii],
                         marker='o',
                         markersize=4,
                         label=tags[ii])
                # if id == 15:
                #     ax3.text(xpos + 10, yo[use[id]], tags[ii], fontsize=9)
        #plt.legend(loc='upper right')

        qk = plt.quiverkey(qu,
                           0.9,
                           0.03,
                           4,
                           '4 m s$^{-1}$',
                           labelpos='E',
                           coordinates='figure')

        ax1.text(14, -60,
                 '(x) "Bolivian High centre" (max. 200hPa geopotential)')

        ax3.text(14, -60, '(o) Area of easterlies')

        plt.tight_layout()
        plt.savefig(fp)
        plt.close('all')
Ejemplo n.º 12
0
def corr_box() -> None:
    """Correlate yearly means of 2m temperature, low-level humidity and wind
    with a box-averaged cloud series over southern Africa, and plot the
    correlation maps per month.

    NOTE(review): references ``mcs_temp``, ``dicm`` and ``dicmean`` which are
    not defined in this function -- presumably module-level globals set
    elsewhere in the original file; confirm before running standalone.

    Reads ERA monthly synoptic files and a GRIDSAT count file (paths from
    ``cnst``); writes one PNG per month.  Returns nothing.
    """
    srfc = cnst.ERA_MONTHLY_SRFC_SYNOP
    pl = cnst.ERA_MONTHLY_PL_SYNOP  # _SYNOP
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-65_monthly_count_-40base_1000km2.nc'  # -70count

    fpath = cnst.network_data + 'figs/CLOVER/months/'

    # Domain [lon_min, lon_max, lat_min, lat_max].
    box = [-10, 55, -35, 0]  #[-18,55,-35,35]#[-10,55,-35,0]

    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(box[0], box[1]),
                latitude=slice(box[2], box[3]))  # latitude=slice(36, -37))
    da2 = xr.open_dataset(srfc)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(box[0], box[1]),
                  latitude=slice(box[2], box[3]))  # latitude=slice(36, -37))
    da3 = xr.open_dataset(mcs)
    da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2], box[3]))

    lons = da.longitude
    lats = da.latitude

    q = da['q'].sel(level=slice(800)).mean('level')
    #q = q[q['time.hour']==12]
    t2 = da2['t2m']  #.sel(level=slice(800)).mean('level')
    #t2 = t2[t2['time.hour']==12]
    u925 = da['u'].sel(level=slice(800)).mean('level')
    #u925 = u925[u925['time.hour']==12]
    u600 = da['u'].sel(level=slice(500, 550)).mean('level')
    v600 = da['v'].sel(level=slice(500, 550)).mean('level')

    # NOTE(review): despite the name this is just the low-level wind; the
    # u600 subtraction is commented out.
    shear = u925  # u600-

    q.values = q.values * 1000  # presumably kg/kg -> g/kg; confirm ERA units

    # Aggregate cloud counts onto the ERA surface grid.
    tir = da3['tir']
    tir = t2.salem.lookup_transform(tir)

    months = np.arange(1, 13)
    # NOTE(review): overrides the full-year range above -- only Jan/Dec run.
    months = [1, 12]

    def array_juggling(data, month, hour=None):
        """Return (yearly-mean series, time mean) for one calendar month,
        restricted to 1983-2017."""

        m = month

        if hour != None:
            data = data[(data['time.month'] == m) & (data['time.hour'] == hour)
                        & (data['time.year'] >= 1983) &
                        (data['time.year'] <= 2017)]
        else:
            data = data[(data['time.month'] == m) & (data['time.year'] >= 1983)
                        & (data['time.year'] <= 2017)]
        data_years = data.groupby('time.year').mean(axis=0)

        data_mean = data.mean(axis=0)

        # diff = xr.DataArray(data_years.values[1::, :, :] - data_years.values[0:-1, :, :],
        #                     coords=[data_years.year[1::], data.latitude, data.longitude], dims=['year','latitude', 'longitude'] )
        diff = xr.DataArray(
            data_years.values,
            coords=[data_years.year, data.latitude, data.longitude],
            dims=['year', 'latitude', 'longitude'])
        # unstack back to lat lon coordinates
        return diff, data_mean

    def corr(a, b, bsingle=None):
        """Correlate each grid point of ``a`` with either a box average of
        ``b`` (default) or the 1-D series ``b`` itself (``bsingle`` truthy);
        returns a Dataset with 'r', 'pval' and regression 'slope' maps."""
        ds = xr.Dataset()
        ds['pval'] = a.copy(deep=True).sum('year') * np.nan
        ds['r'] = a.copy(deep=True).sum('year') * np.nan
        ds['slope'] = a.copy(deep=True).sum('year') * np.nan

        #corr_box = [-10,11,4.5,8]
        corr_box = [16, 24, -24, -18]  #West: [13,25,-23,-10]

        if bsingle:
            bb = b
        else:
            bb = b.sel(latitude=slice(corr_box[2], corr_box[3]),
                       longitude=slice(
                           corr_box[0],
                           corr_box[1])).mean(dim=['latitude', 'longitude'])

        for lat in a.latitude.values:
            for lon in a.longitude.values:
                aa = a.sel(latitude=lat, longitude=lon)
                if bsingle:
                    r, p = stats.pearsonr(aa.values, bb)

                    pf = np.polyfit(aa.values, bb, 1)
                else:
                    r, p = stats.pearsonr(aa.values, bb.values)
                    pf = np.polyfit(aa.values, bb.values, 1)

                slope = pf[0]

                # if (np.nansum(aa.values == 0) >= 10):
                #     p = np.nan
                #     r = np.nan

                ds['r'].loc[{'latitude': lat, 'longitude': lon}] = r
                ds['pval'].loc[{'latitude': lat, 'longitude': lon}] = p
                ds['slope'].loc[{'latitude': lat, 'longitude': lon}] = slope

        return ds

    for m in months:

        t2diff, t2year = array_juggling(t2, m, hour=12)  #
        qdiff, qyear = array_juggling(q, m, hour=12)  #, hour=12
        shdiff, sheyear = array_juggling(shear, m, hour=12)  #, hour=12
        vdiff, vyear = array_juggling(v600, m, hour=12)  # , hour=12
        udiff, uyear = array_juggling(u600, m, hour=12)  # , hour=12
        tirdiff, tiryear = array_juggling(tir, m)  # average frequency change

        # NOTE(review): mcs_temp is not defined in this function.
        mcs_month = mcs_temp[mcs_temp['time.month'] ==
                             m]  # meanT box average change

        #tirdiff = mcs_month.values[1::]-mcs_month.values[0:-1]

        bs = False
        # NOTE(review): bare except silently skips months where the
        # correlation fails; kept as-is.
        try:
            qcorr = corr(qdiff, tirdiff, bsingle=bs)
        except:
            continue
        shearcorr = corr(shdiff, tirdiff, bsingle=bs)
        tcorr = corr(t2diff, tirdiff, bsingle=bs)

        # pthresh = us.fdr_threshold(qcorr['pval'].values[np.isfinite(qcorr['pval'].values)], alpha=0.05)
        # print(pthresh)
        pthresh = 0.05
        #cloud['slope'].values[cloud['pval'].values > pthresh] = np.nan
        #qcorr['r'].values[qcorr['pval'].values > pthresh] = 0

        # pthresh = us.fdr_threshold(shearcorr['pval'].values[np.isfinite(shearcorr['pval'].values)], alpha=0.05)
        # print(pthresh)
        #shearcorr['r'].values[shearcorr['pval'].values > pthresh] = 0

        # pthresh = us.fdr_threshold(tcorr['pval'].values[np.isfinite(tcorr['pval'].values)], alpha=0.05)
        # print(pthresh)
        #tcorr['r'].values[tcorr['pval'].values > pthresh] = 0

        # NOTE(review): dicm/dicmean are not defined in this function.
        dicm[m].values[dicm[m].values == 0] = np.nan

        print('plot')
        fp = fpath + 'corr_box_SYNOP_SAWest_-50base_' + str(m).zfill(
            2) + '.png'
        # NOTE(review): 'map' shadows the builtin; kept as-is.
        map = shear.salem.get_map()
        #xx, yy = map.grid.xy_coordinates

        # transform their coordinates to the map reference system and plot the arrows
        xx, yy = map.grid.transform(shear.longitude.values,
                                    shear.latitude.values,
                                    crs=shear.salem.grid.proj)

        xx, yy = np.meshgrid(xx, yy)

        #ipdb.set_trace()
        f = plt.figure(figsize=(15, 8), dpi=200)
        ax1 = f.add_subplot(221)
        # map.set_shapefile(rivers=True)
        # bla = ma.masked_invalid(tcorr['r'].values)

        map.set_data(tcorr['r'], interp='linear')  # interp='linear'
        contours = map.set_contour(t2year - 273.15,
                                   interp='linear',
                                   levels=np.arange(24, 37, 4),
                                   cmap='inferno')

        #plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        map.set_plot_params(
            cmap='RdBu_r',
            extend='both',
            levels=[-0.7, -0.6, -0.5, -0.4, 0.4, 0.5, 0.6,
                    0.7])  # levels=np.arange(-0.5,0.51,0.1),
        map.visualize(ax=ax1, title='2m temperature')

        ax2 = f.add_subplot(222)
        map.set_data(qcorr['r'], interp='linear')  # interp='linear'
        map.set_contour(qyear,
                        interp='linear',
                        levels=np.arange(5, 19, 3),
                        cmap='inferno')

        map.set_plot_params(
            cmap='RdBu_r',
            extend='both',
            levels=[-0.7, -0.6, -0.5, -0.4, 0.4, 0.5, 0.6,
                    0.7])  # levels=np.arange(-0.5,0.51,0.1),
        map.visualize(ax=ax2, title='800hPa Spec. humidity')

        ax3 = f.add_subplot(223)
        map.set_data(shearcorr['r'], interp='linear')  # interp='linear'
        map.set_plot_params(
            cmap='RdBu_r',
            extend='both',
            levels=[-0.7, -0.6, -0.5, -0.4, 0.4, 0.5, 0.6, 0.7])
        map.set_contour(sheyear,
                        interp='linear',
                        levels=np.arange(-10, 1, 6),
                        cmap='inferno')
        # NOTE(review): 'interp' is not a matplotlib Axes.contour kwarg --
        # confirm this call does not raise with the installed version.
        cs = ax3.contour(sheyear,
                         interp='linear',
                         levels=np.arange(-10, 1, 6),
                         cmap='inferno')
        plt.clabel(cs, inline=1, fontsize=10)

        # Quiver only every 7th grid point
        u = uyear[4::7, 4::7]
        v = vyear[4::7, 4::7]

        xx = xx[4::7, 4::7]
        yy = yy[4::7, 4::7]

        qu = ax3.quiver(xx, yy, u.values, v.values, scale=50)

        # levels=np.arange(-0.5,0.51,0.1)
        map.visualize(ax=ax3, title='500-800hPa Zonal wind shear')

        ax4 = f.add_subplot(224)
        map.set_contour(dicmean[m],
                        interp='linear',
                        levels=[0.1, 0.5, 1, 2.5],
                        cmap='inferno')

        map.set_data(dicm[m])  #
        #ax4.axhspan(-26,18)  #[15,25,-26,-18]
        # Highlight the correlation box used in corr() above.
        coord = [16, 24, -24, -18]
        geom = shpg.box(coord[0], coord[2], coord[1], coord[3])
        map.set_geometry(geom,
                         zorder=99,
                         color='darkorange',
                         linewidth=3,
                         linestyle='--',
                         alpha=0.3)
        # ax4.axvline(x=25, ymin=-26, ymax=-18)
        # ax4.axhline(y=-26, xmin=15, xmax=25)
        # ax4.axhline(y=-18, xmin=15, xmax=25)

        map.set_plot_params(
            cmap='viridis', extend='both', levels=np.arange(
                10, 51,
                10))  # levels=np.arange(20,101,20)  #np.arange(20,101,20)
        map.visualize(ax=ax4,
                      title='-65C cloud cover change',
                      cbar_title='$\%$ decade-1')

        plt.tight_layout()
        plt.savefig(fp)
        plt.close('all')
Ejemplo n.º 13
0
def tgrad_shear_trend():
    """Per-month time series of zonal wind shear vs. mean MCS temperature.

    For every calendar month, builds year-by-year series of (i) 700-925hPa
    zonal wind shear, (ii) low-level specific humidity and (iii) the
    meridional 2m-temperature gradient over a West African box, correlates
    their year-to-year changes with the MCS box-mean cloud-top temperature
    from a GRIDSAT aggregate, and saves one two-axis trend plot per month
    to figs/CLOVER/timeseries/. Returns nothing.
    """

    # Input datasets: ERA monthly surface & pressure-level files plus a
    # GRIDSAT MCS box-mean temperature aggregate (West Africa box).
    srfc = cnst.ERA_MONTHLY_SRFC
    pl = cnst.ERA_MONTHLY_PL
    mcs = cnst.GRIDSAT + 'aggs/box_13W-13E-4-8N_meanT-50_from5000km2.nc' #box_25-33E-28-10S_meanT-50_from5000km2.nc'#box_13W-13E-4-8N_meanT-50_from5000km2.nc'
    out = cnst.network_data + 'figs/CLOVER/timeseries/'

    # Boxes as [lonW, lonE, latS, latN]: `box` for wind/humidity fields,
    # `tpick` for the meridional T2m-gradient search, `Tlons` for the
    # longitude range of the north/south temperature averages.
    box = [-10,10,5.5,8]
    tpick = [-10,10,6,25]
    Tlons = [-10,10]

    # Alternative southern-Africa configuration (kept for reference).
    # box = [18,30,-22,-10]
    # tpick = [18,30,-30,-20]
    # Tlons = [18,30]

    # 2m temperature with latitude flipped to ascending order.
    dam = xr.open_dataset(srfc)
    dam = u_darrays.flip_lat(dam)
    dam = dam['t2m']

    # Monthly climatology of the zonally averaged T2m profile over `tpick`.
    tgrad = dam.sel(longitude=slice(tpick[0], tpick[1]), latitude=slice(tpick[2],tpick[3])).mean('longitude').groupby('time.month').mean('time')

    Tgrad_lat = []

    # For each month, locate the latitude of the strongest northward T2m
    # increase (centred difference over two grid points).
    for tgrad_ts in tgrad:

        ttgrad = np.argmax(tgrad_ts.values[2::] - tgrad_ts.values[0:-2])

        lat_pos = tgrad.latitude[ttgrad+1]

        Tgrad_lat.append(float(lat_pos))

    # Pressure-level winds / humidity restricted to the analysis box.
    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2],box[3]))#latitude=slice(36, -37))

    # NOTE(review): despite the names, u925 uses the 925hPa level and
    # u600 uses 700hPa (layer-mean variants are commented out).
    u925 = da['u'].sel(level=925)#(level=slice(850, 925)).mean(dim='level')#).mean(dim='level')  #slice(850
    u600 = da['u'].sel(level=700)#(level=slice(550, 650)).mean(dim='level')

    qq925 = da['q'].sel(level=slice(850,925)).mean(dim='level')
    qq600 = da['q'].sel(level=slice(550, 650)).mean(dim='level')

    # MCS box-mean cloud-top temperature time series.
    mcs_temp = xr.open_dataset(mcs)
    mcs_temp = mcs_temp['tir']
    months = [1,2,3,4,5,6,7,8,9,10,11,12]
    mnames = {1:'January',2:'February', 3 : 'March', 4 : 'April', 5 : 'May', 6:'June', 7:'July', 8:'August', 9 : 'September', 10 : 'October', 11:'November', 12:'December'}

    # First-vs-last-5-year changes, collected but not plotted here.
    mcs_change = []
    shear_change = []
    tgrad_change = []

    for m in months:

        # NOTE(review): these gradient-following latitude slices are computed
        # but never used -- the fixed 5-8N / 11-18N bands below are used
        # instead; confirm which behaviour is intended.
        tsouthlice = slice(Tgrad_lat[m-1]-1.25, Tgrad_lat[m-1]-0.25)
        tnorthslice = slice(Tgrad_lat[m-1]+0.25, Tgrad_lat[m-1]+1.25)

        tsouth = dam.sel(longitude=slice(Tlons[0], Tlons[1]), latitude=slice(5,8))
        tnorth = dam.sel(longitude=slice(Tlons[0], Tlons[1]), latitude=slice(11,18))

        # Restrict every field to calendar month m.
        south = tsouth[(tsouth['time.month']==m)]
        north = tnorth[(tnorth['time.month']==m)]
        ulow = u925[(u925['time.month']==m)]
        uhigh = u600[(u600['time.month']==m)]
        mcs_month = mcs_temp[mcs_temp['time.month']==m]#*(-1)
        qlow = qq925[(qq925['time.month']==m)]
        qmid = qq600[(qq600['time.month'] == m)]
        ##da = da.sel(longitude=slice(-18,51), latitude=slice(36, -37))

        # Annual series: zonal mean, then a min/max over latitude to pick
        # the extreme value within each band.
        south_peryear = south.groupby('time.year').mean('longitude').min('latitude')
        north_peryear = north.groupby('time.year').mean('longitude').max('latitude')

        u925_peryear = ulow.groupby('time.year').mean('longitude').max('latitude') #ulow.groupby('time.year').mean()

        u600_peryear = uhigh.groupby('time.year').mean('longitude').min('latitude')#.mean() # ('latitude').min()

        qlow_peryear = qlow.groupby('time.year').mean('longitude').max('latitude')#.mean() # ('latitude').min()
        qmid_peryear = qmid.groupby('time.year').mean('longitude').max('latitude')

        # Drop the first four years; q converted kg/kg -> g/kg.
        tgrad = ((north_peryear-south_peryear)[4::])
        shear = (u600_peryear-u925_peryear)[4::] # -q_peryear[4::]#
        q = qlow_peryear[4::]*1000

        # Correlations of year-to-year differences with MCS temperature.
        r = stats.pearsonr(shear.values[1::]-shear.values[0:-1],mcs_month.values[1::]-mcs_month.values[0:-1])
        rq = stats.pearsonr(q.values[1::]-q.values[0:-1],mcs_month.values[1::]-mcs_month.values[0:-1])
        tshear_cor = stats.pearsonr(shear.values[1::]-shear.values[0:-1],tgrad.values[1::]-tgrad.values[0:-1])

        # Linear trends; the linregress result immediately overwrites the
        # ustats estimate computed on the previous line.
        #sslope, sint = ustats.linear_trend(shear)
        sslope, sint = ustats.linear_trend(shear)
        sslope, sint, sr, sp, sstd_err = stats.linregress(np.arange(len(shear)), shear)
        try:
            #mslope, mint = ustats.linear_trend(mcs_month)
            mslope, mint, mrr, mp, mstd_err = stats.linregress(np.arange(len(mcs_month)), mcs_month)

        except:
            continue

        # Flag months whose MCS series shows no significant monotonic trend;
        # they are still plotted but labelled 'not_sig'.
        mr = stats.pearsonr(np.arange(len(mcs_month)), mcs_month)
        print(m, mr, mrr, mslope)
        sig = 'SIG'
        if mr[1]>0.05:
            #continue
            sig = 'not_sig'

        mcs_change.append(mcs_month[-5::].mean()-mcs_month[0:5].mean())
        shear_change.append(shear[-5::].mean()-shear[0:5].mean())
        tgrad_change.append(tgrad[-5::].mean()-tgrad[0:5].mean())

        # Two-axis plot: shear (left, black) and MCS temperature (right,
        # red), each with its fitted linear trend line.
        x = np.arange(0,len(shear))
        rr = r[0]
        f=plt.figure(figsize=(6,3))
        ax = f.add_subplot(111)
        ax.plot(tgrad.year, shear, 'x-', label='Zonal wind shear 600-925hPa', color='k')
        ax.plot(tgrad.year, sint + x*sslope, '--', color='k')
        ax.text(0.5,0.8, 'MCS: '+ sig +'_' + str(np.round(mslope,decimals=2)*10) + ' p-val: ' + str(np.round(mp, decimals=2)), transform=ax.transAxes)
        ax.set_ylim(-8,0)
        #ax.set_ylim(9.8, 10)
        ax1 = ax.twinx()
        ax1.plot(mcs_month['time.year'],mcs_month, 'o-', label='Mean MCS temp.', color='r')
        ax1.plot(tgrad.year, mint + x*mslope, '--', color='r')
        # Proxy artists for a combined legend across the twin axes.
        mcsline = lines.Line2D([],[], color='r', label='Mean MCS temp.', linestyle='solid', marker='o')
        shearline = lines.Line2D([],[], color='k', label='Zonal wind shear 600-925hPa', linestyle='solid', marker='x', markersize=5)
        ax1.set_ylabel('degC')
        ax.set_ylabel('m s-1')
        ax.set_title(mnames[m]+' | Corr.:'+ str(np.round(rr, decimals=2)) +'|' + str(np.round(rq[0], decimals=2)) + '| Tgrad/Shear corr: ' + str(np.round(tshear_cor[0], decimals=2)))
        if m==3:
            ax.legend(handles=[mcsline, shearline], loc='lower left')
        f.savefig(out + 'timeseries_WA'+str(m)+'.png')

        plt.close('all')
Ejemplo n.º 14
0
def trend_all():
    """Plot decadal trend maps of T2m, low-level humidity, wind shear and
    MCS cloud cover over southern Africa, one figure per month (group).

    Uses ERA synoptic monthly files at 12UTC and a GRIDSAT -65C cloud-count
    aggregate. The per-month cloud-cover trend/mean maps are additionally
    pickled (dicm/dicmean) for reuse by the correlation plots. Relies on a
    sibling ``calc_trend`` helper defined elsewhere in this module.
    """

    srfc = cnst.ERA_MONTHLY_SRFC_SYNOP
    pl = cnst.ERA_MONTHLY_PL_SYNOP
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-65_monthly_count_-40base_15-21UTC_1000km2.nc'

    fpath = cnst.network_data + 'figs/CLOVER/months/'

    # Analysis domain [lonW, lonE, latS, latN] (southern Africa).
    box=[5,55,-36,0]#  [-18,40,0,25] #

    # Pressure-level, surface and MCS datasets cut to the domain;
    # latitudes flipped to ascending order first.
    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2],box[3]))
    da2 = xr.open_dataset(srfc)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2],box[3]))
    da3 = xr.open_dataset(mcs)
    da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2],box[3]))

    lons = da.longitude
    lats = da.latitude

    # Surface pressure at 12UTC. NOTE(review): the *1000 factor looks
    # suspicious for a pressure unit conversion (Pa<->hPa would be /100 or
    # *100) and `press` is not used further below -- confirm intent.
    press = da2['sp']
    press = press[press['time.hour'] == 12]
    press.values = press.values*1000
    low_press = 850
    up_press = 550

    # 12UTC fields: humidity averaged over the 100hPa layer above
    # low_press, 2m temperature, and winds in the layers bounded by
    # up_press/low_press.
    q = da['q'].sel(level=slice(low_press-100, low_press)).mean('level')
    q = q[q['time.hour']==12]
    t2d = da2['t2m']#['t2m']
    #t2d = da['t'].sel(level=slice(800, 850)).mean('level')
    t2d = t2d[t2d['time.hour']==12]

    u600 = da['u'].sel(level=slice(up_press-100, up_press)).mean('level')
    u600 = u600[u600['time.hour']==12]
    v600 = da['v'].sel(level=slice(up_press-100, up_press)).mean('level')
    v600 = v600[v600['time.hour']==12]

    # Wind speed/direction of the mid-level wind; only [0] (speed) is used.
    ws600 = u_met.u_v_to_ws_wd(u600, v600)

    u800 = da['u'].sel(level=slice(low_press-100, low_press)).mean('level')
    u800 = u800[u800['time.hour']==12]

    v800 = da['v'].sel(level=slice(low_press-100, low_press)).mean('level')
    v800 = v800[v800['time.hour']==12]

    # Vector shear between mid and low layer, reduced to its speed.
    shear_u = u600-u800 #u600-
    shear_v = v600-v800 # v600-

    ws_shear = u_met.u_v_to_ws_wd(shear_u.values, shear_v.values)

    # Wrap the numpy speed fields back into DataArrays with t2d's coords.
    ws_600 = t2d.copy(deep=True)
    ws_600.name = 'ws'
    ws_600.values = ws600[0]

    shear = t2d.copy(deep=True)
    shear.name = 'shear'
    shear.values = ws_shear[0]

    u6 = u600
    v6 = v600

    q.values = q.values*1000  # kg/kg -> g/kg

    # Coarsen the ERA grid by factor 2 and regrid the MCS counts onto it.
    grid = t2d.salem.grid.regrid(factor=0.5)
    t2 = t2d # grid.lookup_transform(t2d)
    tir = grid.lookup_transform(da3['tir'])

    grid = grid.to_dataset()
    tir = xr.DataArray(tir, coords=[da3['time'],  grid['y'], grid['x']], dims=['time',  'latitude','longitude'])

    # Months to plot; tuples denote multi-month groups (e.g. Nov+Jan).
    months= [(11,1), 2,3,10]#[(12,2)]#[1,2,3,4,5,6,7,8,9,10,11,12]# #,2,3,11,12]#[(12,2)]#[1,2,3,4,5,6,7,8,9,10,11,12]# #,2,3,11,12]

    # Cloud-cover trend (dicm) and mean (dicmean) maps, pickled at the end.
    dicm = {}
    dicmean = {}

    for m in months:
        # Mann-Kendall trend estimation with significance masking.
        method = 'mk'

        if type(m)==int:
            m = [m]

        sig = True

        t2trend, t2mean = calc_trend(t2, m,  method=method, sig=sig,hour=12, wilks=False) #hour=12,
        t2_mean = t2mean.mean(axis=0)

        tirtrend, tirmean = calc_trend(tir, m, method=method, sig=sig, wilks=False)

        tirm_mean = tirmean.mean(axis=0)

        qtrend, qmean = calc_trend(q, m, method=method, sig=sig,hour=12, wilks=False) #hour=12,
        q_mean = qmean.mean(axis=0)

        sheartrend, shearmean = calc_trend(shear, m, method=method, sig=sig,hour=12, wilks=False) #hour=12,
        shear_mean = shearmean.mean(axis=0)

        u6trend, u6mean = calc_trend(u6, m,  method=method, sig=sig, hour=12,wilks=False) #hour=12,
        u6_mean = u6mean.mean(axis=0)
        v6trend, v6mean = calc_trend(v6, m, method=method, sig=sig, hour=12,wilks=False) #hour=12,
        v6_mean = v6mean.mean(axis=0)

        # Convert per-year trends to per-decade.
        t2trend_unstacked = t2trend*10. # warming over decade
        qtrend_unstacked = qtrend * 10.  # warming over decade
        sheartrend_unstacked = sheartrend * 10.  # warming over decade
        u6trend_unstacked = u6trend * 10
        v6trend_unstacked = v6trend * 10

        # Cloud-cover trend as a relative change per decade (percent).
        tirtrend_unstacked = ((tirtrend.values)*10. / tirm_mean.values) * 100.

        tirtrend_out = xr.DataArray(tirtrend_unstacked, coords=[grid['y'], grid['x']], dims=['latitude','longitude'])
        tirmean_out = xr.DataArray(tirm_mean, coords=[grid['y'], grid['x']], dims=['latitude','longitude'])

        # Keyed by the (first) month of the group.
        dicm[m[0]] = tirtrend_out
        dicmean[m[0]] = tirmean_out

        t_da = t2trend_unstacked
        q_da = qtrend_unstacked
        s_da = sheartrend_unstacked
        ti_da = tirtrend_unstacked

        if len(m) == 1:
            fp = fpath + 'trend_synop_'+str(m[0]).zfill(2)+'.png'
        else:
            fp = fpath + 'trend_synop_' + str(m[0]).zfill(2) +'-'+ str(m[1]).zfill(2) + '.png'
        map = shear.salem.get_map()

        f = plt.figure(figsize=(15,8), dpi=300)

        # transform their coordinates to the map reference system and plot the arrows
        xx, yy = map.grid.transform(shear.longitude.values, shear.latitude.values,
                                    crs=shear.salem.grid.proj)

        xx, yy = np.meshgrid(xx, yy)
        # Quiver only every 2nd grid point (trend vectors).
        u = u6trend_unstacked.values[1::2, 1::2]
        v = v6trend_unstacked.values[1::2, 1::2]

        # Quiver only every 2nd grid point (mean-wind vectors).
        uu = u6_mean.values[1::2, 1::2]
        vv = v6_mean.values[1::2, 1::2]

        xx = xx[1::2, 1::2]
        yy = yy[1::2, 1::2]

        # Panel 1: T2m trend with mean-temperature contours.
        ax1 = f.add_subplot(221)
        map.set_data(t_da.values, interp='linear')  # interp='linear'

        map.set_contour((t2_mean.values-273.15).astype(np.float64), interp='linear', colors='k', linewidths=0.5, levels=[20,23,26,29,32,35])
        map.set_plot_params(levels=[-0.5,-0.4,-0.3,-0.2,-0.1,-0.05,-0.02, 0.02,0.05,0.1,0.2,0.3,0.4,0.5], cmap='RdBu_r', extend='both')  # levels=np.arange(-0.5,0.51,0.1),

        #map.set_contour((t2_mean.values).astype(np.float64), interp='linear', colors='k', linewidths=0.5, levels=np.linspace(800,925,8))
        #map.set_plot_params(levels=[-0.5,-0.4,-0.3,-0.2,-0.1,-0.05,-0.02, 0.02,0.05,0.1,0.2,0.3,0.4,0.5], cmap='RdBu_r', extend='both')  # levels=np.arange(-0.5,0.51,0.1),

        dic = map.visualize(ax=ax1, title='2m temperature trend | contours: mean T', cbar_title='K decade-1')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        # Panel 2: humidity trend with mean-humidity contours.
        ax2 = f.add_subplot(222)
        map.set_data(q_da.values,interp='linear')  # interp='linear'
        map.set_contour((q_mean.values).astype(np.float64),interp='linear', colors='k', levels=[6,8,10,12,14,16], linewidths=0.5)
        map.set_plot_params(levels=[-0.4,-0.3,-0.2,-0.1,-0.05,-0.02, 0.02,0.05,0.1,0.2,0.3,0.4], cmap='RdBu', extend='both')  # levels=np.arange(-0.5,0.51,0.1),

        dic = map.visualize(ax=ax2, title='800hPa Spec. humidity trend | contours: mean q', cbar_title='g kg-1 decade-1')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        # Panel 3: shear trend with mean mid-level wind vectors.
        ax3 = f.add_subplot(223)
        map.set_data(s_da.values, interp='linear')  # interp='linear'
        map.set_contour(s_da.values, interp='linear', levels=np.arange(-7,7,8), cmap='Blues')

        map.set_plot_params(levels=[-0.5,-0.4,-0.3,-0.2,-0.1,-0.05,-0.02, 0.02,0.05,0.1,0.2,0.3,0.4,0.5], cmap='RdBu_r', extend='both')  # levels=np.arange(-0.5,0.51,0.1)
        map.visualize(ax=ax3, title='800-500hPa wind shear trend, mean 500hPa wind vectors', cbar_title='m s-1 decade-1')
        qu = ax3.quiver(xx, yy, uu, vv, scale=80, width=0.002)

        qk = plt.quiverkey(qu, 0.4, 0.03, 4, '4 m s$^{-1}$',
                           labelpos='E', coordinates='figure')

        # Panel 4: relative cloud-cover change with mean-frequency contours.
        ax4 = f.add_subplot(224)
        map.set_contour(tirm_mean.values, interp='linear', levels=[0.1,0.5,1,2.5], colors='k', linewidths=0.5)

        # Mask zero-trend points so they plot as missing.
        ti_da[ti_da==0] = np.nan
        map.set_data(ti_da)  #
        coord = [18, 25, -28, -20]
        geom = shpg.box(coord[0], coord[2], coord[1], coord[3])
        #map.set_geometry(geom, zorder=99, color='darkorange', linewidth=3, linestyle='--', alpha=0.3)

        map.set_plot_params(cmap='viridis', extend='both', levels=np.arange(10,41,10))  # levels=np.arange(20,101,20)  #np.arange(20,101,20)
        dic = map.visualize(ax=ax4, title='-65C cloud cover change | >1000km2 -40C', cbar_title='$\%$ decade-1')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        plt.tight_layout()
        plt.savefig(fp)
        plt.close('all')

    # Persist the per-month cloud-cover maps for the correlation plots.
    pkl.dump(dicm,
             open(cnst.network_data + 'data/CLOVER/saves/storm_frac_synop12UTC_SA.p',
                  'wb'))

    pkl.dump(dicmean,
                 open(cnst.network_data + 'data/CLOVER/saves/storm_frac_mean_synop12UTC_SA.p',
                      'wb'))
Ejemplo n.º 15
0
def corr_box():
    """Correlate yearly ERA fields with MCS cloud-cover changes (southern Africa).

    For each selected month, builds year-by-year fields of 2m temperature,
    low-level humidity and low-level zonal wind from ERA synoptic monthly
    files (12UTC, 1983-2017), correlates every grid point with the MCS
    cloud frequency averaged over a fixed correlation box, and writes a
    four-panel figure per month to figs/CLOVER/months/.

    Fix vs. the original: ``dicm``/``dicmean`` (the per-month storm-fraction
    trend/mean maps produced by ``trend_all``) and ``mcs_temp`` were
    referenced without ever being defined, raising NameError at runtime.
    The dictionaries are now loaded from the pickle files that
    ``trend_all`` writes; the ``mcs_temp`` lookup was dead code (its only
    consumer was commented out) and has been removed. The bare ``except:``
    was narrowed to ``except Exception``.
    """
    srfc = cnst.ERA_MONTHLY_SRFC_SYNOP
    pl = cnst.ERA_MONTHLY_PL_SYNOP # _SYNOP
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-65_monthly_count_-40base_1000km2.nc'  # -70count

    fpath = cnst.network_data + 'figs/CLOVER/months/'

    # Per-month storm-fraction trend (dicm) and mean (dicmean) maps as
    # pickled by trend_all(). NOTE(review): trend_all iterates months
    # (11,1),2,3,10 -- confirm the pickles contain the months used below.
    dicm = pkl.load(open(cnst.network_data + 'data/CLOVER/saves/storm_frac_synop12UTC_SA.p', 'rb'))
    dicmean = pkl.load(open(cnst.network_data + 'data/CLOVER/saves/storm_frac_mean_synop12UTC_SA.p', 'rb'))

    # Analysis domain [lonW, lonE, latS, latN].
    box=[-10,55,-35,0]#[-18,55,-35,35]#[-10,55,-35,0]

    # Pressure-level, surface and MCS datasets cut to the domain;
    # latitudes flipped to ascending order first.
    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2], box[3]))  # latitude=slice(36, -37))
    da2 = xr.open_dataset(srfc)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2], box[3]))  # latitude=slice(36, -37))
    da3 = xr.open_dataset(mcs)
    da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2], box[3]))

    lons = da.longitude
    lats = da.latitude

    # Fields: humidity and zonal wind averaged over levels up to 800hPa,
    # 2m temperature, and 500-550hPa winds for the quiver overlay.
    q = da['q'].sel(level=slice(800)).mean('level')
    t2 = da2['t2m']#.sel(level=slice(800)).mean('level')
    u925 = da['u'].sel(level=slice(800)).mean('level')
    u600 = da['u'].sel(level=slice(500,550)).mean('level')
    v600 = da['v'].sel(level=slice(500,550)).mean('level')

    # Here "shear" is just the low-level zonal wind (u600-u925 disabled).
    shear = u925 # u600-

    q.values = q.values * 1000  # kg/kg -> g/kg

    # Regrid the MCS frequency field onto the ERA grid.
    tir = da3['tir']
    tir = t2.salem.lookup_transform(tir)

    months = np.arange(1, 13)
    months = [1,12]

    def array_juggling(data, month, hour=None):
        """Return (per-year DataArray, time-mean field) of `data` for one
        calendar month, restricted to 1983-2017 and, if given, to one
        synoptic hour."""
        m = month

        if hour is not None:
            data = data[(data['time.month'] == m) & (data['time.hour'] == hour) & (data['time.year'] >= 1983) & (
                data['time.year'] <= 2017)]
        else:
            data = data[(data['time.month'] == m) & (data['time.year'] >= 1983) & (data['time.year'] <= 2017)]
        data_years = data.groupby('time.year').mean(axis=0)

        data_mean = data.mean(axis=0)

        diff = xr.DataArray(data_years.values, coords=[data_years.year, data.latitude, data.longitude],
                            dims=['year', 'latitude', 'longitude'])
        return diff, data_mean

    def corr(a, b, bsingle=None):
        """Pointwise Pearson r, p-value and regression slope of `a` against
        the corr_box average of `b` (or against `b` itself when `bsingle`
        is truthy)."""
        ds = xr.Dataset()
        ds['pval'] = a.copy(deep=True).sum('year') * np.nan
        ds['r'] = a.copy(deep=True).sum('year') * np.nan
        ds['slope'] = a.copy(deep=True).sum('year') * np.nan

        corr_box = [16, 24, -24, -18]  # West: [13,25,-23,-10]

        if bsingle:
            bb = b
        else:
            bb = b.sel(latitude=slice(corr_box[2], corr_box[3]), longitude=slice(corr_box[0], corr_box[1])).mean(dim=['latitude', 'longitude'])

        for lat in a.latitude.values:
            for lon in a.longitude.values:
                aa = a.sel(latitude=lat, longitude=lon)
                if bsingle:
                    r, p = stats.pearsonr(aa.values, bb)
                    pf = np.polyfit(aa.values, bb, 1)
                else:
                    r, p = stats.pearsonr(aa.values, bb.values)
                    pf = np.polyfit(aa.values, bb.values, 1)

                slope = pf[0]

                ds['r'].loc[{'latitude': lat, 'longitude': lon}] = r
                ds['pval'].loc[{'latitude': lat, 'longitude': lon}] = p
                ds['slope'].loc[{'latitude': lat, 'longitude': lon}] = slope

        return ds

    for m in months:

        # Per-year fields for month m (ERA at 12UTC, MCS without hour filter).
        t2diff, t2year = array_juggling(t2, m, hour=12)
        qdiff, qyear = array_juggling(q, m, hour=12)
        shdiff, sheyear = array_juggling(shear, m, hour=12)
        vdiff, vyear = array_juggling(v600, m, hour=12)
        udiff, uyear = array_juggling(u600, m, hour=12)
        tirdiff, tiryear = array_juggling(tir, m)  # average frequency change

        bs = False
        try:
            qcorr = corr(qdiff, tirdiff, bsingle=bs)
        except Exception:
            # Correlation can fail e.g. on all-NaN slices; skip the month.
            continue
        shearcorr = corr(shdiff, tirdiff, bsingle=bs)
        tcorr = corr(t2diff, tirdiff, bsingle=bs)

        # Significance filtering (FDR variants) is currently disabled;
        # all correlations are plotted unmasked.

        # Mask zero cloud-cover change so it plots as missing.
        dicm[m].values[dicm[m].values == 0] = np.nan

        print('plot')
        fp = fpath + 'corr_box_SYNOP_SAWest_-50base_' + str(m).zfill(2) + '.png'
        map = shear.salem.get_map()

        # Transform grid coordinates to the map reference system for quivers.
        xx, yy = map.grid.transform(shear.longitude.values, shear.latitude.values,
                                    crs=shear.salem.grid.proj)
        xx, yy = np.meshgrid(xx, yy)

        f = plt.figure(figsize=(15,8), dpi=200)

        # Panel 1: T2m correlation with mean-temperature contours.
        ax1 = f.add_subplot(221)
        map.set_data(tcorr['r'], interp='linear')
        contours = map.set_contour(t2year-273.15, interp='linear', levels=np.arange(24,37,4), cmap='inferno')
        map.set_plot_params(cmap='RdBu_r', extend='both', levels=[-0.7,-0.6,-0.5,-0.4,0.4,0.5,0.6,0.7])
        map.visualize(ax=ax1, title='2m temperature')

        # Panel 2: humidity correlation with mean-humidity contours.
        ax2 = f.add_subplot(222)
        map.set_data(qcorr['r'], interp='linear')
        map.set_contour(qyear, interp='linear', levels=np.arange(5,19,3), cmap='inferno')
        map.set_plot_params(cmap='RdBu_r', extend='both', levels=[-0.7,-0.6,-0.5,-0.4,0.4,0.5,0.6,0.7])
        map.visualize(ax=ax2, title='800hPa Spec. humidity')

        # Panel 3: wind correlation, mean-wind contours and quiver overlay.
        ax3 = f.add_subplot(223)
        map.set_data(shearcorr['r'], interp='linear')
        map.set_plot_params(cmap='RdBu_r', extend='both', levels=[-0.7,-0.6,-0.5,-0.4, 0.4,0.5,0.6,0.7])
        map.set_contour(sheyear, interp='linear', levels=np.arange(-10,1,6), cmap='inferno')
        cs = ax3.contour(sheyear, interp='linear', levels=np.arange(-10,1,6), cmap='inferno')
        plt.clabel(cs, inline=1, fontsize=10)

        # Quiver only every 7th grid point.
        u = uyear[4::7, 4::7]
        v = vyear[4::7, 4::7]
        xx = xx[4::7, 4::7]
        yy = yy[4::7, 4::7]
        qu = ax3.quiver(xx, yy, u.values, v.values, scale=50)

        map.visualize(ax=ax3, title='500-800hPa Zonal wind shear')

        # Panel 4: cloud-cover change with the correlation box highlighted.
        ax4 = f.add_subplot(224)
        map.set_contour(dicmean[m], interp='linear', levels=[0.1,0.5,1,2.5], cmap='inferno')
        map.set_data(dicm[m])
        coord = [16,24,-24,-18]
        geom = shpg.box(coord[0], coord[2], coord[1], coord[3])
        map.set_geometry(geom, zorder=99, color='darkorange', linewidth=3, linestyle='--', alpha=0.3)
        map.set_plot_params(cmap='viridis', extend='both', levels=np.arange(10,51,10))
        map.visualize(ax=ax4, title='-65C cloud cover change', cbar_title='$\%$ decade-1')

        plt.tight_layout()
        plt.savefig(fp)
        plt.close('all')
Ejemplo n.º 16
0
def corr_box():
    """Correlate yearly 800-850hPa ERA fields with MCS cloud-frequency
    changes and plot four panels (T, q, shear correlations plus cloud-cover
    change) per month for a pan-Africa domain.

    Loads the storm-fraction trend/mean dictionaries pickled by a previous
    trend run and a GRIDSAT box-mean MCS temperature series; writes one PNG
    per month to figs/CLOVER/months/.
    """
    srfc = cnst.ERA_MONTHLY_SRFC_SYNOP
    pl = cnst.ERA_MONTHLY_PL_SYNOP  # _SYNOP
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-60_monthly_count_-50base.nc'  # -70count

    fpath = cnst.network_data + 'figs/CLOVER/months/'

    # Per-month cloud-cover trend (dicm) and mean (dicmean) maps from a
    # previous trend run.
    dicm = pkl.load(
        open(
            cnst.network_data +
            'data/CLOVER/saves/storm_frac_synop12UTC_test.p', 'rb'))
    dicmean = pkl.load(
        open(
            cnst.network_data +
            'data/CLOVER/saves/storm_frac_mean_synop12UTC_test.p', 'rb'))

    # Box-mean MCS cloud-top temperature series (southern-Africa box).
    mcsbox = cnst.GRIDSAT + 'aggs/box_12-25E-23-10S_meanT-50_from5000km2_SA.nc'  # box_13W-13E-4-8N_meanT-50_from5000km2.nc'
    mcs_temp = xr.open_dataset(mcsbox)
    mcs_temp = mcs_temp['tir']

    # Analysis domain [lonW, lonE, latS, latN].
    box = [-18, 55, -35, 35]
    #box = [-18, 40, 0, 25]

    # Pressure-level, surface and MCS datasets cut to the domain;
    # latitudes flipped to ascending order first.
    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(box[0], box[1]),
                latitude=slice(box[2], box[3]))  # latitude=slice(36, -37))
    da2 = xr.open_dataset(srfc)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(box[0], box[1]),
                  latitude=slice(box[2], box[3]))  # latitude=slice(36, -37))
    da3 = xr.open_dataset(mcs)
    da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2], box[3]))

    lons = da.longitude
    lats = da.latitude

    # Layer means: 800-850hPa for the low-level fields, 600-650hPa for the
    # upper zonal wind used in the shear.
    q = da['q'].sel(level=slice(800, 850)).mean('level')
    #q = q[q['time.hour']==12]
    t2 = da['t'].sel(level=slice(800, 850)).mean('level')
    #t2 = t2[t2['time.hour']==12]
    u925 = da['u'].sel(level=slice(800, 850)).mean('level')
    #u925 = u925[u925['time.hour']==12]
    u600 = da['u'].sel(level=slice(600, 650)).mean('level')
    #u600 = u600[u600['time.hour']==12]

    shear = u600 - u925  # u600-

    q.values = q.values * 1000  # kg/kg -> g/kg

    # Regrid the MCS frequency field onto the ERA grid.
    tir = da3['tir']
    tir = t2.salem.lookup_transform(tir)

    months = np.arange(1, 13)
    months = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]

    def array_juggling(data, month, hour=None):
        """Return (per-year DataArray, time-mean field) of `data` for one
        calendar month, restricted to 1983-2017 and, if given, to one
        synoptic hour."""
        m = month

        if hour != None:
            data = data[(data['time.month'] == m) & (data['time.hour'] == hour)
                        & (data['time.year'] >= 1983) &
                        (data['time.year'] <= 2017)]
        else:
            data = data[(data['time.month'] == m) & (data['time.year'] >= 1983)
                        & (data['time.year'] <= 2017)]
        data_years = data.groupby('time.year').mean(axis=0)

        data_mean = data.mean(axis=0)

        # diff = xr.DataArray(data_years.values[1::, :, :] - data_years.values[0:-1, :, :],
        #                     coords=[data_years.year[1::], data.latitude, data.longitude], dims=['year','latitude', 'longitude'] )
        diff = xr.DataArray(
            data_years.values,
            coords=[data_years.year, data.latitude, data.longitude],
            dims=['year', 'latitude', 'longitude'])
        # unstack back to lat lon coordinates
        return diff, data_mean

    def corr(a, b, bsingle=None):
        """Pointwise Pearson r, p-value and regression slope of `a` against
        the corr_box average of `b` (or against `b` itself when `bsingle`
        is truthy)."""
        ds = xr.Dataset()
        ds['pval'] = a.copy(deep=True).sum('year') * np.nan
        ds['r'] = a.copy(deep=True).sum('year') * np.nan
        ds['slope'] = a.copy(deep=True).sum('year') * np.nan

        #corr_box = [-10,11,4.5,8]
        corr_box = [13, 25, -23, -10]

        if bsingle:
            bb = b
        else:
            bb = b.sel(latitude=slice(corr_box[2], corr_box[3]),
                       longitude=slice(
                           corr_box[0],
                           corr_box[1])).mean(dim=['latitude', 'longitude'])

        for lat in a.latitude.values:
            for lon in a.longitude.values:
                aa = a.sel(latitude=lat, longitude=lon)
                if bsingle:
                    r, p = stats.pearsonr(aa.values, bb)

                    pf = np.polyfit(aa.values, bb, 1)
                else:
                    r, p = stats.pearsonr(aa.values, bb.values)
                    pf = np.polyfit(aa.values, bb.values, 1)

                slope = pf[0]

                # if (np.nansum(aa.values == 0) >= 10):
                #     p = np.nan
                #     r = np.nan

                ds['r'].loc[{'latitude': lat, 'longitude': lon}] = r
                ds['pval'].loc[{'latitude': lat, 'longitude': lon}] = p
                ds['slope'].loc[{'latitude': lat, 'longitude': lon}] = slope

        return ds

    for m in months:

        # Per-year fields for month m (ERA at 12UTC, MCS without hour filter).
        t2diff, t2year = array_juggling(t2, m, hour=12)  #
        qdiff, qyear = array_juggling(q, m, hour=12)  #, hour=12
        shdiff, sheyear = array_juggling(shear, m, hour=12)  #, hour=12
        tirdiff, tiryear = array_juggling(tir, m)  # average frequency change

        # NOTE(review): mcs_month is computed but unused -- its consumer
        # (the tirdiff override below) is commented out.
        mcs_month = mcs_temp[mcs_temp['time.month'] ==
                             m]  # meanT box average change

        #tirdiff = mcs_month.values[1::]-mcs_month.values[0:-1]

        # NOTE(review): the bare `except:` silently skips any month where
        # the correlation fails; consider narrowing to `except Exception`.
        bs = False
        try:
            qcorr = corr(qdiff, tirdiff, bsingle=bs)
        except:
            continue
        shearcorr = corr(shdiff, tirdiff, bsingle=bs)
        tcorr = corr(t2diff, tirdiff, bsingle=bs)

        # Significance filtering (FDR variants) is currently disabled.
        # pthresh = us.fdr_threshold(qcorr['pval'].values[np.isfinite(qcorr['pval'].values)], alpha=0.05)
        # print(pthresh)
        pthresh = 0.05
        #cloud['slope'].values[cloud['pval'].values > pthresh] = np.nan
        #qcorr['r'].values[qcorr['pval'].values > pthresh] = 0

        # pthresh = us.fdr_threshold(shearcorr['pval'].values[np.isfinite(shearcorr['pval'].values)], alpha=0.05)
        # print(pthresh)
        #shearcorr['r'].values[shearcorr['pval'].values > pthresh] = 0

        # pthresh = us.fdr_threshold(tcorr['pval'].values[np.isfinite(tcorr['pval'].values)], alpha=0.05)
        # print(pthresh)
        #tcorr['r'].values[tcorr['pval'].values > pthresh] = 0

        # Mask zero cloud-cover change so it plots as missing.
        dicm[m].values[dicm[m].values == 0] = np.nan

        print('plot')
        fp = fpath + 'corr_box_SYNOP_SAWest_-50base' + str(m).zfill(2) + '.png'
        map = shear.salem.get_map()

        f = plt.figure(figsize=(13, 7), dpi=300)
        # Panel 1: low-level temperature correlation.
        ax1 = f.add_subplot(221)
        # map.set_shapefile(rivers=True)
        # bla = ma.masked_invalid(tcorr['r'].values)

        map.set_data(tcorr['r'], interp='linear')  # interp='linear'
        contours = map.set_contour(t2year - 273.15,
                                   interp='linear',
                                   levels=np.arange(24, 37, 4),
                                   cmap='inferno')

        #plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        map.set_plot_params(
            cmap='RdBu_r',
            extend='both',
            levels=[-0.7, -0.6, -0.5, -0.4, 0.4, 0.5, 0.6,
                    0.7])  # levels=np.arange(-0.5,0.51,0.1),
        map.visualize(ax=ax1, title='925hP temperature')

        # Panel 2: humidity correlation.
        ax2 = f.add_subplot(222)
        map.set_data(qcorr['r'], interp='linear')  # interp='linear'
        map.set_contour(qyear,
                        interp='linear',
                        levels=np.arange(5, 19, 3),
                        cmap='inferno')

        map.set_plot_params(
            cmap='RdBu_r',
            extend='both',
            levels=[-0.7, -0.6, -0.5, -0.4, 0.4, 0.5, 0.6,
                    0.7])  # levels=np.arange(-0.5,0.51,0.1),
        map.visualize(ax=ax2, title='925hPa Spec. humidity')

        # Panel 3: shear correlation with mean-shear contours.
        ax3 = f.add_subplot(223)
        map.set_data(shearcorr['r'], interp='linear')  # interp='linear'
        map.set_plot_params(
            cmap='RdBu_r',
            extend='both',
            levels=[-0.7, -0.6, -0.5, -0.4, 0.4, 0.5, 0.6, 0.7])
        map.set_contour(sheyear,
                        interp='linear',
                        levels=np.arange(-10, 1, 3),
                        cmap='inferno')
        # levels=np.arange(-0.5,0.51,0.1)
        map.visualize(ax=ax3, title='600-925hPa Zonal wind shear')

        # Panel 4: cloud-cover change with mean-frequency contours.
        ax4 = f.add_subplot(224)
        map.set_contour(dicmean[m],
                        interp='linear',
                        levels=[0.1, 0.5, 1, 2.5],
                        cmap='inferno')
        map.set_data(dicm[m])  #

        map.set_plot_params(
            cmap='viridis', extend='both', levels=np.arange(
                10, 51,
                10))  # levels=np.arange(20,101,20)  #np.arange(20,101,20)
        map.visualize(ax=ax4,
                      title='-70C cloud cover change',
                      cbar_title='$\%$ decade-1')

        plt.tight_layout()
        plt.savefig(fp)
        plt.close('all')
Ejemplo n.º 17
0
def corr_box():
    """Correlate yearly ERA predictors with MCS cloud-cover counts, per month.

    For each (multi-)month entry in ``months`` the function builds yearly
    means (1983-2017) of 12 UTC specific humidity, 2m temperature and
    zonal wind shear, correlates every predictor grid point with the
    box-averaged GRIDSAT -65C cloud-count series (box from ``box_dic``),
    and saves a 4-panel correlation map per month under ``fpath``.

    Side effects: reads netCDF and pickle inputs, writes one PNG per
    month. Returns nothing.
    """
    srfc = cnst.ERA_MONTHLY_SRFC_SYNOP
    pl = cnst.ERA_MONTHLY_PL_SYNOP
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-65_monthly_count_-40base_15-21UTC_1000km2.nc'  # -70count

    fpath = cnst.network_data + 'figs/CLOVER/months/'

    # Pre-computed cloud-cover trend maps (per-month dicts keyed by month number),
    # produced by a companion trend function and saved via pickle.
    dicm = pkl.load(open(cnst.network_data + 'data/CLOVER/saves/storm_frac_synop12UTC_SA.p', 'rb'))
    dicmean = pkl.load(open(cnst.network_data + 'data/CLOVER/saves/storm_frac_mean_synop12UTC_SA.p', 'rb'))

    # mcsbox = cnst.GRIDSAT + 'aggs/SAboxWest_meanT-40_1000km2.nc' # box_13W-13E-4-8N_meanT-50_from5000km2.nc'
    # mcs_temp = xr.open_dataset(mcsbox)
    # mcs_temp = mcs_temp['tir']

    # [lon_min, lon_max, lat_min, lat_max] analysis domain (southern Africa).
    box=[5,55,-36,0]#[-18,55,-35,35]#[-10,55,-35,0]

    # Pressure-level data: flip latitude to ascending order, then crop to box.
    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2], box[3]))  # latitude=slice(36, -37))
    da2 = xr.open_dataset(srfc)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2], box[3]))  # latitude=slice(36, -37))
    da3 = xr.open_dataset(mcs)
    da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2], box[3]))

    # NOTE(review): lons/lats are never used below.
    lons = da.longitude
    lats = da.latitude

    press = da2['sp']
    press = press[press['time.hour'] == 12]
    # NOTE(review): surface pressure multiplied by 1000 — units look suspicious
    # (ERA 'sp' is normally in Pa); confirm intent. press is not used further below.
    press.values = press.values*1000
    low_press = 850
    up_press = 550

    # Low-level humidity: mean over the 800-850 hPa layer at 12 UTC, in kg/kg here.
    q = da['q'].sel(level=slice(low_press-50, low_press)).mean('level')
    q = q[q['time.hour']==12]
    t2d = da2['t2m']#['t2m']
    #t2d = da['t'].sel(level=slice(800, 850)).mean('level')
    t2d = t2d[t2d['time.hour']==12]

    # Mid-level (500-550 hPa) wind components at 12 UTC.
    u600 = da['u'].sel(level=slice(up_press-50, up_press)).mean('level')
    u600 = u600[u600['time.hour']==12]
    v600 = da['v'].sel(level=slice(up_press-50, up_press)).mean('level')
    v600 = v600[v600['time.hour']==12]

    # Returns (wind speed, wind direction); only speed ([0]) is used below.
    ws600 = u_met.u_v_to_ws_wd(u600, v600)

    # Low-level (800-850 hPa) wind components at 12 UTC.
    u800 = da['u'].sel(level=slice(low_press-50, low_press)).mean('level')
    u800 = u800[u800['time.hour']==12]

    v800 = da['v'].sel(level=slice(low_press-50, low_press)).mean('level')
    v800 = v800[v800['time.hour']==12]

    # Vector shear between the mid and low layers.
    shear_u = u600-u800 #u600-
    shear_v = v600-v800 # v600-

    ws_shear = u_met.u_v_to_ws_wd(shear_u.values, shear_v.values)

    # Wrap the numpy speed fields back into DataArrays by reusing t2d's
    # coordinates/metadata (deep copies so t2d itself is untouched).
    ws_600 = t2d.copy(deep=True)
    ws_600.name = 'ws'
    ws_600.values = ws600[0]

    shear = t2d.copy(deep=True)
    shear.name = 'shear'
    shear.values = ws_shear[0]

    # kg/kg -> g/kg.
    q.values = q.values * 1000

    # Regrid the GRIDSAT cloud counts onto the ERA grid.
    tir = da3['tir']
    tir = t2d.salem.lookup_transform(tir)

    def array_juggling(data, month, hour=None):
        """Filter ``data`` to month(s)/hour within 1983-2017 and aggregate.

        ``month`` is a 1- or 2-element sequence; with two elements the
        filter is (month >= m0) OR (month <= m1), i.e. a season wrapping
        the year end such as (11, 1). Returns a tuple of
        (yearly means as a DataArray over 'year', overall time mean).
        """
        m = month

        if hour is not None:
            if len(month) > 1:

                data = data[((data['time.month'] >= month[0]) | (data['time.month'] <= month[1])) & (
                            data['time.hour'] == hour) & (data['time.year'] >= 1983) & (data['time.year'] <= 2017)]
            else:

                data = data[
                    (data['time.month'] == month[0]) & (data['time.hour'] == hour) & (data['time.year'] >= 1983) & (
                                data['time.year'] <= 2017)]
        else:
            if len(month) > 1:
                data = data[((data['time.month'] >= month[0]) | (data['time.month'] <= month[1])) & (
                            data['time.year'] >= 1983) & (data['time.year'] <= 2017)]
            else:
                data = data[
                    (data['time.month'] == month[0]) & (data['time.year'] >= 1983) & (data['time.year'] <= 2017)]

        data_years = data.groupby('time.year').mean(axis=0)

        data_mean = data.mean(axis=0)

        # diff = xr.DataArray(data_years.values[1::, :, :] - data_years.values[0:-1, :, :],
        #                     coords=[data_years.year[1::], data.latitude, data.longitude], dims=['year','latitude', 'longitude'] )
        diff = xr.DataArray(data_years.values, coords=[data_years.year, data.latitude, data.longitude],
                            dims=['year', 'latitude', 'longitude'])
        # unstack back to lat lon coordinates
        return diff, data_mean


    def corr(a, b, bsingle=None, c_box=None):
        """Per-gridpoint Pearson correlation of ``a`` with a series from ``b``.

        ``a`` is (year, lat, lon). When ``bsingle`` is falsy, ``b`` is
        averaged over the lat/lon window ``c_box`` first; otherwise ``b``
        is used as-is (already a 1-D series). Returns a Dataset with
        'r', 'pval' and linear 'slope' (db/da) per pixel; pixels with
        >= 10 zero-valued years are masked to NaN.
        """
        ds = xr.Dataset()
        # Initialise NaN maps with a's spatial shape ('year' summed away).
        ds['pval'] = a.copy(deep=True).sum('year') * np.nan
        ds['r'] = a.copy(deep=True).sum('year') * np.nan
        ds['slope'] = a.copy(deep=True).sum('year') * np.nan

        corr_box = c_box

        if bsingle:
            bb = b
        else:
            bb = b.sel(latitude=slice(corr_box[2], corr_box[3]), longitude=slice(corr_box[0], corr_box[1])).mean(dim=['latitude', 'longitude'])

        for lat in a.latitude.values:
            for lon in a.longitude.values:
                aa = a.sel(latitude=lat, longitude=lon)
                if bsingle:
                    r, p = stats.pearsonr(aa.values, bb)

                    pf = np.polyfit(aa.values, bb, 1)
                else:
                    r, p = stats.pearsonr(aa.values, bb.values)
                    pf = np.polyfit(aa.values, bb.values, 1)


                slope = pf[0]

                # Mask pixels dominated by zeros (e.g. no-storm regions).
                if (np.nansum(aa.values == 0) >= 10):
                    p = np.nan
                    r = np.nan

                ds['r'].loc[{'latitude': lat, 'longitude': lon}] = r
                ds['pval'].loc[{'latitude': lat, 'longitude': lon}] = p
                ds['slope'].loc[{'latitude': lat, 'longitude': lon}] = slope

        return ds



    # Per-month correlation boxes [lon_min, lon_max, lat_min, lat_max];
    # the (11, 1) key is the Nov-Jan season.
    box_dic = {

        1 : [16,30,-25,-20],
        2 : [12,28,-10,3],
        3: [16,25,-23,-18],
        4: [16,25,-23,-18],
        10 : [14,28,-15,-5],
        11 : [16,30,-20,-10],
        12 : [16,30,-22,-12],
        (11,1) : [18,30,-23,-18]
    }


    # NOTE(review): only the last assignment takes effect; the first two
    # month lists are dead code kept for quick switching.
    months = [1,2,3,4,10,11,12]
    months = [10]
    months = [(11,1),2,3,10]

    for m in months:

        c_box = box_dic[m]

        # Normalise a single month to a 1-element list; tuples (seasons) pass through.
        if type(m)==int:
            m = [m]

        # NOTE(review): t2diff is always computed for October ([10]) regardless
        # of the current month m — confirm this is intentional.
        t2diff, t2year = array_juggling(t2d, [10]) #
        qdiff, qyear = array_juggling(q, m) #, hour=12
        shdiff, sheyear = array_juggling(shear, m) #, hour=12
        vdiff, vyear = array_juggling(v600, m)  # , hour=12
        udiff, uyear = array_juggling(u600, m)  # , hour=12
        tirdiff, tiryear = array_juggling(tir, m)  # average frequency change

        #mcs_month = mcs_temp[mcs_temp['time.month'] == m] # meanT box average change

        #tirdiff = mcs_month.values[1::]-mcs_month.values[0:-1]

        bs = False
        # NOTE(review): bare except silently skips months where the
        # correlation fails (e.g. NaN-only series) — consider narrowing.
        try:
            qcorr = corr(qdiff, tirdiff, bsingle=bs, c_box=c_box)
        except:
            continue
        shearcorr = corr(shdiff, tirdiff, bsingle=bs, c_box=c_box)
        tcorr = corr(t2diff, tirdiff, bsingle=bs, c_box=c_box)

        # Mask zero cloud-cover change so it plots as missing rather than 0.
        dicm[m[0]].values[dicm[m[0]].values==0] = np.nan

        print('plot')

        if len(m) == 1:
            fp = fpath + 'corr_mid_-70C_synop_-50base_linear_OCT_'+str(m[0]).zfill(2)+'.png'
        else:
            fp = fpath + 'corr_mid_-70C_synop_-50base_linear_OCT_' + str(m[0]).zfill(2) +'-'+ str(m[1]).zfill(2) + '.png'


        # NOTE(review): 'map' shadows the builtin of the same name (local only).
        map = shear.salem.get_map()

        xx, yy = map.grid.transform(shear.longitude.values, shear.latitude.values,
                                    crs=shear.salem.grid.proj)

        xx, yy = np.meshgrid(xx, yy)

        # Quiver only every 2nd grid point
        u = uyear.values[1::2, 1::2]
        v = vyear.values[1::2, 1::2]

        xx = xx[1::2, 1::2]
        yy = yy[1::2, 1::2]

        #ipdb.set_trace()
        f = plt.figure(figsize=(15,8), dpi=350)
        ax1 = f.add_subplot(221)

        # Panel 1: 2m temperature correlation, mean T contours.
        map.set_data(tcorr['r'], interp='linear')  # interp='linear'
        map.set_contour(t2year-273.15, interp='linear', levels=np.arange(24,37,4),colors='k', linewidths=0.5)


        map.set_plot_params(cmap='RdBu_r', extend='both',levels=[-0.7,-0.6,-0.5,-0.4,-0.3,0.3,0.4,0.5,0.6,0.7])  # levels=np.arange(-0.5,0.51,0.1),
        dic = map.visualize(ax=ax1, title='2m temperature corr. | contours: mean T', cbar_title='K decade-1')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        # Panel 2: low-level humidity correlation, mean q contours.
        ax2 = f.add_subplot(222)
        map.set_data(qcorr['r'],interp='linear')  # interp='linear'
        map.set_contour(qyear,interp='linear', levels=np.arange(5,19,3), colors='k', linewidths=0.5)

        map.set_plot_params(cmap='RdBu_r', extend='both',levels=[-0.7,-0.6,-0.5,-0.4,-0.3,0.3,0.4,0.5,0.6,0.7])  # levels=np.arange(-0.5,0.51,0.1),
        dic = map.visualize(ax=ax2, title='800hPa Spec. humidity corr. | contours: mean q', cbar_title='g kg-1 decade-1')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        # Panel 3: shear correlation with mean mid-level wind vectors.
        ax3 = f.add_subplot(223)
        map.set_data(shearcorr['r'], interp='linear')  # interp='linear'
        map.set_plot_params(cmap='RdBu_r', extend='both', levels=[-0.7,-0.6,-0.5,-0.4, -0.3,0.3,0.4,0.5,0.6,0.7])
        #map.set_contour(sheyear, interp='linear', levels=np.arange(-10,1,6), colors='k', linewidths=0.5)

        qu = ax3.quiver(xx, yy, u, v, scale=50, width=0.002)
        qk = plt.quiverkey(qu, 0.4, 0.03, 4, '4 m s$^{-1}$',
                           labelpos='E', coordinates='figure')

          # levels=np.arange(-0.5,0.51,0.1)
        dic = map.visualize(ax=ax3, title='800-500hPa wind shear corr., mean 500hPa wind vectors', cbar_title='m s-1 decade-1')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        # Panel 4: cloud-cover change map with the correlation box outlined.
        ax4 = f.add_subplot(224)
        map.set_contour(dicmean[m[0]], interp='linear', levels=[0.1,0.5,1,2.5], colors='k', linewidths=0.5)

        map.set_data(dicm[m[0]])  #
        #ax4.axhspan(-26,18)  #[15,25,-26,-18]
        coord = c_box#[17, 25, -28, -22]
        geom = shpg.box(coord[0], coord[2], coord[1], coord[3])
        map.set_geometry(geom, zorder=99, color='darkorange', linewidth=3, linestyle='--', alpha=0.3)
        # ax4.axvline(x=25, ymin=-26, ymax=-18)
        # ax4.axhline(y=-26, xmin=15, xmax=25)
        # ax4.axhline(y=-18, xmin=15, xmax=25)

        map.set_plot_params(cmap='viridis', extend='both', levels=np.arange(10,41,10))  # levels=np.arange(20,101,20)  #np.arange(20,101,20)
        dic = map.visualize(ax=ax4, title='-65C cloud cover change | >1000km2 -40C', cbar_title='$\%$ decade-1')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        plt.tight_layout()
        plt.savefig(fp)
        plt.close('all')
Ejemplo n.º 18
0
def trend_all():
    """Compute and plot monthly Mann-Kendall trends of ERA fields and MCS counts.

    Loads ERA monthly synoptic pressure-level data plus GRIDSAT -60C cloud
    counts, coarsens everything to half the ERA resolution, and for every
    calendar month computes 12 UTC trends (via the external ``calc_trend``,
    method 'mk' with significance) of 800-850 hPa temperature, specific
    humidity, 600/925 hPa zonal shear and cloud cover. Saves a 4-panel
    trend map per month and pickles the cloud-cover trend/mean maps.

    Side effects: writes PNGs to ``fpath`` and two pickle files.
    Returns nothing.
    """
    srfc = cnst.ERA_MONTHLY_SRFC_SYNOP
    pl = cnst.ERA_MONTHLY_PL_SYNOP
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-60_monthly_count_-50base.nc'

    fpath = cnst.network_data + 'figs/CLOVER/months/'

    # [lon_min, lon_max, lat_min, lat_max] covering Africa.
    box=[-18,55,-35,35]#  [-18,40,0,25] #

    # Flip latitude to ascending, then crop all inputs to the box.
    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2],box[3]))
    da2 = xr.open_dataset(srfc)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2],box[3]))
    da3 = xr.open_dataset(mcs)
    da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2],box[3]))

    # NOTE(review): lons/lats are never used below.
    lons = da.longitude
    lats = da.latitude

    # 12 UTC fields averaged over the 800-850 hPa layer.
    q = da['q'].sel(level=slice(800,850)).mean('level')

    q = q[q['time.hour']==12]
    t2d = da['t'].sel(level=slice(800,850)).mean('level')
    t2d = t2d[t2d['time.hour']==12]
    u925 = da['u'].sel(level=slice(800,850)).mean('level')
    u925 = u925[u925['time.hour']==12]
    u600 = da['u'].sel(level=slice(600,650)).mean('level')
    u600 = u600[u600['time.hour']==12]

    shear = u600-u925

    # kg/kg -> g/kg.
    q.values = q.values*1000

    # Coarsen everything onto a half-resolution salem grid.
    grid = t2d.salem.grid.regrid(factor=0.5)
    t2 = grid.lookup_transform(t2d)
    tir = grid.lookup_transform(da3['tir'])
    q = grid.lookup_transform(q)
    shear = grid.lookup_transform(shear)

    # tir = t2d.salem.lookup_transform(da3['tir'])
    # t2 = t2d
    # #tir = da3['tir']
    # q = q
    # shear = shear

    grid = grid.to_dataset()

    # Re-wrap the regridded numpy arrays as DataArrays on the coarse grid.
    t2 = xr.DataArray(t2, coords=[t2d['time'],  grid['y'], grid['x']], dims=['time',  'latitude','longitude'])
    q = xr.DataArray(q, coords=[t2d['time'],  grid['y'], grid['x']], dims=['time',  'latitude','longitude'])
    tir = xr.DataArray(tir, coords=[da3['time'],  grid['y'], grid['x']], dims=['time',  'latitude','longitude'])
    shear = xr.DataArray(shear, coords=[t2d['time'],  grid['y'], grid['x']], dims=['time',  'latitude','longitude'])

    months=[1,2,3,4,5,6,7,8,9, 10,11,12]

    # Accumulators for the per-month cloud-cover trend and mean maps (pickled at the end).
    dicm = {}
    dicmean = {}

    for m in months:
        method = 'mk'

        # calc_trend expects a month list; wrap the scalar.
        if len([m])==1:
            m = [m]

        sig = True

        t2trend, t2mean = calc_trend(t2, m, hour=12, method=method, sig=sig, wilks=False)
        t2_mean = t2mean.mean(axis=0)

        tirtrend, tirmean = calc_trend(tir, m, method=method, sig=sig, wilks=False)

        tirm_mean = tirmean.mean(axis=0)

        qtrend, qmean = calc_trend(q, m, hour=12, method=method, sig=sig, wilks=False)
        q_mean = qmean.mean(axis=0)

        sheartrend, shearmean = calc_trend(shear, m, hour=12, method=method, sig=sig, wilks=False)
        shear_mean = shearmean.mean(axis=0)

        # Per-year trends scaled to per-decade; cloud cover additionally
        # expressed as percent of its climatological mean per decade.
        t2trend_unstacked = t2trend*10. # warming over decade
        qtrend_unstacked = qtrend * 10.  # warming over decade
        sheartrend_unstacked = sheartrend * 10.  # warming over decade
        tirtrend_unstacked = ((tirtrend.values)*10. / tirm_mean.values) * 100.

        tirtrend_out = xr.DataArray(tirtrend_unstacked, coords=[grid['y'], grid['x']], dims=['latitude','longitude'])
        tirmean_out = xr.DataArray(tirm_mean, coords=[grid['y'], grid['x']], dims=['latitude','longitude'])

        dicm[m[0]] = tirtrend_out
        dicmean[m[0]] = tirmean_out

        t_da = t2trend_unstacked
        q_da = qtrend_unstacked
        s_da = sheartrend_unstacked
        ti_da = tirtrend_unstacked

        fp = fpath + 'trend_mk_-60C_synop_-50base'+str(m[0]).zfill(2)+'_sig.png'
        # NOTE(review): 'map' shadows the builtin of the same name (local only).
        map = shear.salem.get_map()

        # f = plt.figure(figsize=(8, 5), dpi=300)
        # ax1 = f.add_subplot(221)
        #
        # # map.set_shapefile(rivers=True)
        # map.set_plot_params()
        #
        # map.set_data(t_da, interp='linear')
        # map.set_plot_params(levels=np.linspace(-0.5,0.5,10), cmap='RdBu_r', extend='both')
        # map.visualize(ax=ax1, title='t2')
        #
        # ax2 = f.add_subplot(222)
        # map.set_data(q_da, interp='linear')
        # map.set_plot_params(levels=np.linspace(-0.5,0.5,10), cmap='RdBu', extend='both')
        # map.visualize(ax=ax2, title='q')
        #
        # ax3 = f.add_subplot(223)
        # map.set_data(s_da, interp='linear')
        # map.set_plot_params(levels=np.linspace(-1,1.1,10), cmap='RdBu_r', extend='both')
        # map.visualize(ax=ax3, title='u-shear')
        #
        # ax4 = f.add_subplot(224)
        # map.set_data(ti_da)
        # map.set_plot_params(cmap='Blues', extend='both', levels=np.arange(20,101,20)) #levels=np.arange(20,101,20)
        # map.visualize(ax=ax4, title='-70C frequency')
        #
        # plt.tight_layout()
        # plt.savefig(fp)
        # plt.close('all')

        f = plt.figure(figsize=(13,7), dpi=300)
        ax1 = f.add_subplot(221)
        # map.set_shapefile(rivers=True)
        # bla = ma.masked_invalid(tcorr['r'].values)

        # Panel 1: temperature trend with mean temperature contours (degC).
        map.set_data(t_da, interp='linear')  # interp='linear'
        contours = map.set_contour(t2_mean-273.15, interp='linear', levels=np.arange(24,37,4), cmap='inferno')

        #plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        map.set_plot_params(levels=np.linspace(-0.5,0.5,10), cmap='RdBu_r', extend='both')  # levels=np.arange(-0.5,0.51,0.1),
        map.visualize(ax=ax1, title='925hP temperature')

        # Panel 2: humidity trend with mean humidity contours.
        ax2 = f.add_subplot(222)
        map.set_data(q_da,interp='linear')  # interp='linear'
        map.set_contour(q_mean,interp='linear', levels=np.arange(5,19,3), cmap='inferno')

        map.set_plot_params(levels=np.linspace(-0.5,0.5,10), cmap='RdBu', extend='both')  # levels=np.arange(-0.5,0.51,0.1),
        map.visualize(ax=ax2, title='925hPa Spec. humidity')

        # Panel 3: zonal wind shear trend with mean shear contours.
        ax3 = f.add_subplot(223)
        map.set_data(s_da, interp='linear')  # interp='linear'
        map.set_contour(shear_mean, interp='linear', levels=np.arange(-10,1,3), cmap='inferno')
        map.set_plot_params(levels=np.linspace(-1,1,10), cmap='RdBu_r', extend='both')  # levels=np.arange(-0.5,0.51,0.1)
        map.visualize(ax=ax3, title='600-925hPa Zonal wind shear')

        # Panel 4: cloud-cover percentage change (zeros masked to NaN).
        ax4 = f.add_subplot(224)
        map.set_contour(tirm_mean, interp='linear', levels=[0.1,0.5,1,2.5], cmap='inferno')
        ti_da[ti_da==0] = np.nan
        map.set_data(ti_da)  #


        map.set_plot_params(cmap='viridis', extend='both', levels=np.arange(10,51,10))  # levels=np.arange(20,101,20)  #np.arange(20,101,20)
        map.visualize(ax=ax4, title='-70C cloud cover change', cbar_title='$\%$ decade-1')

        plt.tight_layout()
        plt.savefig(fp)
        plt.close('all')

    # Persist the cloud-cover trend/mean maps for later correlation plots.
    pkl.dump(dicm,
             open(cnst.network_data + 'data/CLOVER/saves/storm_frac_synop12UTC.p',
                  'wb'))

    pkl.dump(dicmean,
                 open(cnst.network_data + 'data/CLOVER/saves/storm_frac_mean_synop12UTC.p',
                      'wb'))
Ejemplo n.º 19
0
def tgrad_shear_trend():
    """Plot monthly time series of zonal wind shear versus MCS temperature.

    Derives the latitude of the strongest meridional 2m-temperature
    gradient per climatological month, builds yearly series of
    north-south temperature gradient, 700-925 hPa zonal wind shear,
    low-level humidity and box-mean MCS cloud-top temperature, computes
    year-to-year correlations and linear trends, and saves one time-series
    figure per month to ``out``.

    Side effects: writes PNG figures. Returns nothing.
    """
    srfc = cnst.ERA_MONTHLY_SRFC
    pl = cnst.ERA_MONTHLY_PL
    mcs = cnst.GRIDSAT + 'aggs/box_13W-13E-4-8N_meanT-50_from5000km2.nc'  #box_25-33E-28-10S_meanT-50_from5000km2.nc'#box_13W-13E-4-8N_meanT-50_from5000km2.nc'
    out = cnst.network_data + 'figs/CLOVER/timeseries/'

    # West Africa configuration; the commented sets below are the
    # corresponding southern-Africa boxes.
    box = [-10, 10, 5.5, 8]
    tpick = [-10, 10, 6, 25]
    Tlons = [-10, 10]

    # box = [18,30,-22,-10]
    # tpick = [18,30,-30,-20]
    # Tlons = [18,30]

    dam = xr.open_dataset(srfc)
    dam = u_darrays.flip_lat(dam)
    dam = dam['t2m']

    # Climatological-month zonal-mean temperature profile over tpick.
    tgrad = dam.sel(
        longitude=slice(tpick[0], tpick[1]),
        latitude=slice(
            tpick[2],
            tpick[3])).mean('longitude').groupby('time.month').mean('time')

    Tgrad_lat = []

    # For each month, find the latitude of the strongest 2-gridpoint
    # temperature difference (centred index, hence +1).
    for tgrad_ts in tgrad:

        ttgrad = np.argmax(tgrad_ts.values[2::] - tgrad_ts.values[0:-2])

        lat_pos = tgrad.latitude[ttgrad + 1]

        Tgrad_lat.append(float(lat_pos))

    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(box[0], box[1]),
                latitude=slice(box[2], box[3]))  #latitude=slice(36, -37))

    # Single-level winds (925 and 700 hPa) used for the shear series.
    u925 = da['u'].sel(
        level=925
    )  #(level=slice(850, 925)).mean(dim='level')#).mean(dim='level')  #slice(850
    u600 = da['u'].sel(level=700)  #(level=slice(550, 650)).mean(dim='level')

    qq925 = da['q'].sel(level=slice(850, 925)).mean(dim='level')
    qq600 = da['q'].sel(level=slice(550, 650)).mean(dim='level')

    mcs_temp = xr.open_dataset(mcs)
    mcs_temp = mcs_temp['tir']
    months = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
    mnames = {
        1: 'January',
        2: 'February',
        3: 'March',
        4: 'April',
        5: 'May',
        6: 'June',
        7: 'July',
        8: 'August',
        9: 'September',
        10: 'October',
        11: 'November',
        12: 'December'
    }

    # Collected but not returned/plotted; kept for interactive inspection.
    mcs_change = []
    shear_change = []
    tgrad_change = []

    for m in months:

        # NOTE(review): these gradient-relative slices are computed but never
        # used — tsouth/tnorth below use fixed latitude bands instead.
        tsouthlice = slice(Tgrad_lat[m - 1] - 1.25, Tgrad_lat[m - 1] - 0.25)
        tnorthslice = slice(Tgrad_lat[m - 1] + 0.25, Tgrad_lat[m - 1] + 1.25)

        tsouth = dam.sel(longitude=slice(Tlons[0], Tlons[1]),
                         latitude=slice(5, 8))
        tnorth = dam.sel(longitude=slice(Tlons[0], Tlons[1]),
                         latitude=slice(11, 18))

        south = tsouth[(tsouth['time.month'] == m)]
        north = tnorth[(tnorth['time.month'] == m)]
        ulow = u925[(u925['time.month'] == m)]
        uhigh = u600[(u600['time.month'] == m)]
        mcs_month = mcs_temp[mcs_temp['time.month'] == m]  #*(-1)
        qlow = qq925[(qq925['time.month'] == m)]
        qmid = qq600[(qq600['time.month'] == m)]
        ##da = da.sel(longitude=slice(-18,51), latitude=slice(36, -37))

        # Yearly scalar series: zonal means reduced to min/max over latitude.
        south_peryear = south.groupby('time.year').mean('longitude').min(
            'latitude')
        north_peryear = north.groupby('time.year').mean('longitude').max(
            'latitude')

        u925_peryear = ulow.groupby('time.year').mean('longitude').max(
            'latitude')  #ulow.groupby('time.year').mean()

        u600_peryear = uhigh.groupby('time.year').mean('longitude').min(
            'latitude')  #.mean() # ('latitude').min()

        qlow_peryear = qlow.groupby('time.year').mean('longitude').max(
            'latitude')  #.mean() # ('latitude').min()
        qmid_peryear = qmid.groupby('time.year').mean('longitude').max(
            'latitude')

        # [4::] drops the first four years to align with the MCS record.
        tgrad = ((north_peryear - south_peryear)[4::])
        shear = (u600_peryear - u925_peryear)[4::]  # -q_peryear[4::]#
        q = qlow_peryear[4::] * 1000

        # Correlations of year-to-year differences.
        r = stats.pearsonr(shear.values[1::] - shear.values[0:-1],
                           mcs_month.values[1::] - mcs_month.values[0:-1])
        rq = stats.pearsonr(q.values[1::] - q.values[0:-1],
                            mcs_month.values[1::] - mcs_month.values[0:-1])
        tshear_cor = stats.pearsonr(shear.values[1::] - shear.values[0:-1],
                                    tgrad.values[1::] - tgrad.values[0:-1])

        #sslope, sint = ustats.linear_trend(shear)
        # NOTE(review): sslope/sint from ustats.linear_trend are immediately
        # overwritten by the linregress result below.
        sslope, sint = ustats.linear_trend(shear)
        sslope, sint, sr, sp, sstd_err = stats.linregress(
            np.arange(len(shear)), shear)
        try:
            #mslope, mint = ustats.linear_trend(mcs_month)
            mslope, mint, mrr, mp, mstd_err = stats.linregress(
                np.arange(len(mcs_month)), mcs_month)

        except:
            continue

        mr = stats.pearsonr(np.arange(len(mcs_month)), mcs_month)
        print(m, mr, mrr, mslope)
        # Tag the MCS trend as significant at the 5% level (label only).
        sig = 'SIG'
        if mr[1] > 0.05:
            #continue
            sig = 'not_sig'

        # First-5-year vs last-5-year mean changes (collected, unused below).
        mcs_change.append(mcs_month[-5::].mean() - mcs_month[0:5].mean())
        shear_change.append(shear[-5::].mean() - shear[0:5].mean())
        tgrad_change.append(tgrad[-5::].mean() - tgrad[0:5].mean())

        # Dual-axis plot: shear (left, black) and MCS temperature (right, red)
        # with their linear fits.
        x = np.arange(0, len(shear))
        rr = r[0]
        f = plt.figure(figsize=(6, 3))
        ax = f.add_subplot(111)
        ax.plot(tgrad.year,
                shear,
                'x-',
                label='Zonal wind shear 600-925hPa',
                color='k')
        ax.plot(tgrad.year, sint + x * sslope, '--', color='k')
        ax.text(0.5,
                0.8,
                'MCS: ' + sig + '_' + str(np.round(mslope, decimals=2) * 10) +
                ' p-val: ' + str(np.round(mp, decimals=2)),
                transform=ax.transAxes)
        ax.set_ylim(-8, 0)
        #ax.set_ylim(9.8, 10)
        ax1 = ax.twinx()
        ax1.plot(mcs_month['time.year'],
                 mcs_month,
                 'o-',
                 label='Mean MCS temp.',
                 color='r')
        ax1.plot(tgrad.year, mint + x * mslope, '--', color='r')
        mcsline = lines.Line2D([], [],
                               color='r',
                               label='Mean MCS temp.',
                               linestyle='solid',
                               marker='o')
        shearline = lines.Line2D([], [],
                                 color='k',
                                 label='Zonal wind shear 600-925hPa',
                                 linestyle='solid',
                                 marker='x',
                                 markersize=5)
        ax1.set_ylabel('degC')
        ax.set_ylabel('m s-1')
        ax.set_title(mnames[m] + ' | Corr.:' + str(np.round(rr, decimals=2)) +
                     '|' + str(np.round(rq[0], decimals=2)) +
                     '| Tgrad/Shear corr: ' +
                     str(np.round(tshear_cor[0], decimals=2)))
        if m == 3:
            ax.legend(handles=[mcsline, shearline], loc='lower left')
        f.savefig(out + 'timeseries_WA' + str(m) + '.png')

        plt.close('all')
Ejemplo n.º 20
0
def get_ERA5(inputs):
    """Extract ERA5 environment boxes around storm locations for one date.

    Parameters
    ----------
    inputs : sequence
        ``(date, indic, ids, clim)``: ``date`` is a datetime-like object,
        ``indic`` a table with per-storm ``clat``/``minlon``/``tmean``/
        ``tmin``/``area`` columns, ``ids`` the storm row indices to process,
        and ``clim`` a bool selecting the 2000-2014 monthly climatology
        files instead of the hourly ERA5 files.

    Returns
    -------
    xr.Dataset or None
        One variable per key of ``dictionary_storm()``, concatenated over
        an 'id' dimension; None when ERA files/times are missing or
        concatenation fails.
    """
    date = inputs[0]
    indic = inputs[1]
    # era_pl = inputs[2]
    # era_srfc = inputs[3]
    ids = inputs[2]
    clim = inputs[3]

    # Accumulator: one list of per-storm DataArrays per variable key.
    dic = dictionary_storm()

    if clim:
        # Climatology: pre-averaged monthly files at 12 UTC (environment)
        # and 18 UTC (storm time).
        time = str(date.month).zfill(2) + '-12'
        stormtime = str(date.month).zfill(2) + '-18'

        pl_str = 'ERA5/monthly/synop_selfmade/CLIM_2000-2014/pressure_levels/ERA5_2000-2014_CLIM_'
        srfc_str = 'ERA5/monthly/synop_selfmade/CLIM_2000-2014/surface/ERA5_2000-2014_CLIM_'

        try:
            print('Open '+ cnst.local_data + pl_str + time + '_pl.nc')
            era_day_pl = xr.open_dataset(
                cnst.local_data + pl_str + time + '_pl.nc')
        except (TypeError, IndexError, KeyError):
            print('Era missing:', date)
            #             for k in dic.keys():
            #                 dic[k].append(np.nan)
            return

        era_day_sf = xr.open_dataset(
            cnst.local_data + srfc_str + time + '_srfc.nc')
        try:
            print('Open ' + cnst.local_data + srfc_str + stormtime + '_srfc.nc')
            era_day_sft = xr.open_dataset(
                cnst.local_data + srfc_str + stormtime + '_srfc.nc')
        except IndexError:
            return
        era_day_plt = xr.open_dataset(
            cnst.local_data + pl_str + stormtime + '_pl.nc')

    else:

        # Hourly files: pick the 12 UTC (environment) and 18 UTC (storm)
        # snapshots for this date.
        era_pl = xr.open_dataset(cnst.local_data + 'ERA5/hourly/pressure_levels/ERA5_' +str(date.year) + '_' + str(date.month).zfill(2) + '_pl.nc')
        era_srfc = xr.open_dataset(
            cnst.local_data + 'ERA5/hourly/surface/ERA5_' + str(date.year) + '_' + str(date.month).zfill(2) + '_srfc.nc')

        era_pl = era_pl.rename({'longitude' : 'lon', 'latitude' : 'lat'})
        era_srfc = era_srfc.rename({'longitude' : 'lon', 'latitude' : 'lat'})

        time = str(date.year) + str(date.month).zfill(2) + str(date.day).zfill(2) + 'T12'
        stormtime = str(date.year) + str(date.month).zfill(2) + str(date.day).zfill(2) + 'T18'

        try:
            era_day_pl = era_pl.sel(time=time).isel(time=0)
        except (TypeError, IndexError, KeyError):
            print('Era missing:', date)
            #             for k in dic.keys():
            #                 dic[k].append(np.nan)
            return

        era_day_sf = era_srfc.sel(time=time).isel(time=0)
        try:
            era_day_sft = era_srfc.sel(time=stormtime).isel(time=0)
        except IndexError:
            return
        era_day_plt = era_pl.sel(time=stormtime).isel(time=0)

    # Flip latitudes to ascending order for the 12 UTC datasets.
    # NOTE(review): the 18 UTC datasets (era_day_sft / era_day_plt) are NOT
    # flipped — confirm whether that is intentional.
    era_day_sf = uda.flip_lat(era_day_sf)
    era_day_pl = uda.flip_lat(era_day_pl)

    for id in ids:

        print('Doing', date)


        # elat = indic.clat[id]
        # elon = indic.clon[id]

        # Storm anchor point: centroid latitude, westernmost longitude.
        elat = indic.clat[id]
        elon = indic.minlon[id]

        # dic['dates'].append(date)
        # dic['lat'].append(elat)
        # dic['lon'].append(elon)
        # ipdb.set_trace()
        point = era_day_pl.sel(lat=elat, lon=elon, method='nearest')

        # Grid indices of the nearest point on surface and pressure-level grids.
        posx = int(np.where(era_day_sf.lon == point.lon)[0])
        posy = int(np.where(era_day_sf.lat == point.lat)[0])

        posxx = int(np.where(era_day_pl.lon == point.lon)[0])
        posyy = int(np.where(era_day_pl.lat == point.lat)[0])

        dist = 18  # ca 200km i.e. 45 *4.4km

        # Cut a (2*dist+1)-point box around the storm from each dataset.
        ds_pl = era_day_pl.apply(uda.cut_box, xpos=posxx, ypos=posyy, dist=dist)
        ds_srfc = era_day_sf.apply(uda.cut_box, xpos=posx, ypos=posy, dist=dist)

        ds_plt = era_day_plt.apply(uda.cut_box, xpos=posxx, ypos=posyy, dist=dist)
        ds_srfct = era_day_sft.apply(uda.cut_box, xpos=posx, ypos=posy, dist=dist)

        # Each appended DataArray carries the storm metadata (tmean/tmin/area/time)
        # as scalar coords so variables stay aligned when concatenated over 'id'.
        try:
            #ipdb.set_trace()
            dic['u925'].append(xr.DataArray(ds_pl['u'].sel(level=925).values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id], 'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        except ValueError:
            print('Value problem')
            continue
        # ipdb.set_trace()
        dic['u650'].append(xr.DataArray(ds_pl['u'].sel(level=650).values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id], 'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['v925'].append(xr.DataArray(ds_pl['v'].sel(level=925).values,
                                        coords={'tmean': indic.tmean[id], 'tmin': indic.tmin[id],
                                                'area': indic.area[id], 'time': date, 'lat': ds_pl['y'].values,
                                                'lon': ds_pl['x'].values}, dims=['lat', 'lon']))


        dic['q925'].append(xr.DataArray(ds_pl['q'].sel(level=925).values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['q700'].append(xr.DataArray(ds_pl['q'].sel(level=700).values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['d925'].append(xr.DataArray(ds_pl['d'].sel(level=925).values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['CAPE'].append(xr.DataArray(ds_srfc['cape'].values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['tcwv'].append(xr.DataArray(ds_srfc['tcwv'].values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['t2'].append(xr.DataArray(ds_srfc['t2m'].values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['divMoist'].append(xr.DataArray(ds_srfc['p84.162'].values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['slp'].append(xr.DataArray(ds_srfc['msl'].values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['u10'].append(xr.DataArray(ds_srfc['u10'].values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['v10'].append(xr.DataArray(ds_srfc['v10'].values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))


        dic['u925_s'].append(
            xr.DataArray(ds_plt['u'].sel(level=925).values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['v925_s'].append(
            xr.DataArray(ds_plt['v'].sel(level=925).values,
                         coords={'tmean': indic.tmean[id], 'tmin': indic.tmin[id], 'area': indic.area[id], 'time': date,
                                 'lat': ds_pl['y'].values, 'lon': ds_pl['x'].values}, dims=['lat', 'lon']))

        dic['u650_s'].append(
            xr.DataArray(ds_plt['u'].sel(level=650).values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['q925_s'].append(
            xr.DataArray(ds_plt['q'].sel(level=925).values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['q700_s'].append(
            xr.DataArray(ds_plt['q'].sel(level=700).values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['d925_s'].append(
            xr.DataArray(ds_plt['d'].sel(level=925).values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['CAPE_s'].append(xr.DataArray(ds_srfct['cape'].values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['tcwv_s'].append(xr.DataArray(ds_srfct['tcwv'].values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['t2_s'].append(xr.DataArray(ds_srfct['t2m'].values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        # FIX: divMoist_s previously omitted the 'tmin' and 'area' coords that
        # every other variable carries; the mismatched coordinates could make
        # the final xr.concat over 'id' fail and the whole call return None.
        dic['divMoist_s'].append(xr.DataArray(ds_srfct['p84.162'].values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['slp_s'].append(xr.DataArray(ds_srfct['msl'].values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['u10_s'].append(xr.DataArray(ds_srfct['u10'].values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))
        dic['v10_s'].append(xr.DataArray(ds_srfct['v10'].values, coords={'tmean':indic.tmean[id],'tmin':indic.tmin[id],'area':indic.area[id],'time': date, 'lat': ds_pl['y'].values, 'lon':ds_pl['x'].values}, dims=['lat', 'lon']))

        print('DID', date)

    # Concatenate all per-storm boxes along a new 'id' dimension.
    ds = xr.Dataset()

    try:
        for k in dic.keys():
            print('doing', k)

            ds[k] = xr.concat(dic[k], 'id')

    except ValueError:
        return


    return ds
Ejemplo n.º 21
0
def corr_box():
    """Correlate monthly ERA predictor fields with cold-cloud cover over a
    Southern-Africa box and plot a 4-panel correlation map per month.

    For every calendar month (1983-2017) this builds yearly grids of
    ~925hPa temperature, specific humidity and 600-925hPa zonal wind shear
    from the monthly ERA synoptic pressure-level file, correlates each grid
    point with the box-mean GRIDSAT -50C cloud count (box 13-25E, 23-10S),
    and saves one PNG per month to the CLOVER figure directory.

    Side effects only (file reads + PNG writes); returns None.
    """
    pl = cnst.ERA_MONTHLY_PL_SYNOP # _SYNOP
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-60_monthly_count_-50base.nc'  # -70count

    fpath = cnst.network_data + 'figs/CLOVER/months/'

    # Pre-computed per-month storm-fraction change / mean maps (panel 4).
    # Use context managers so the pickle file handles are closed promptly.
    with open(cnst.network_data + 'data/CLOVER/saves/storm_frac_synop12UTC_test.p', 'rb') as fo:
        dicm = pkl.load(fo)
    with open(cnst.network_data + 'data/CLOVER/saves/storm_frac_mean_synop12UTC_test.p', 'rb') as fo:
        dicmean = pkl.load(fo)

    # Analysis domain: [lon_min, lon_max, lat_min, lat_max]
    box = [-18, 55, -35, 35]

    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)  # make latitude ascending so .sel slices work
    da = da.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2], box[3]))
    da3 = xr.open_dataset(mcs)
    da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2], box[3]))

    # Low-level (800-850hPa) and mid-level (600-650hPa) layer averages.
    q = da['q'].sel(level=slice(800, 850)).mean('level')
    t2 = da['t'].sel(level=slice(800, 850)).mean('level')
    u925 = da['u'].sel(level=slice(800, 850)).mean('level')
    u600 = da['u'].sel(level=slice(600, 650)).mean('level')

    shear = u600 - u925  # zonal shear between the two layers

    q.values = q.values * 1000  # kg/kg -> g/kg

    # Regrid GRIDSAT cloud counts onto the ERA grid.
    tir = da3['tir']
    tir = t2.salem.lookup_transform(tir)

    months = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]

    def array_juggling(data, month, hour=None):
        """Return (yearly means as DataArray[year, lat, lon], all-time mean)
        for one calendar month, restricted to 1983-2017 and optionally to a
        single synoptic hour."""
        m = month

        if hour is not None:
            data = data[(data['time.month'] == m) & (data['time.hour'] == hour) &
                        (data['time.year'] >= 1983) & (data['time.year'] <= 2017)]
        else:
            data = data[(data['time.month'] == m) & (data['time.year'] >= 1983) & (data['time.year'] <= 2017)]
        data_years = data.groupby('time.year').mean(axis=0)

        data_mean = data.mean(axis=0)

        # Re-wrap so the leading dim is 'year' rather than 'time'.
        diff = xr.DataArray(data_years.values, coords=[data_years.year, data.latitude, data.longitude],
                            dims=['year', 'latitude', 'longitude'])
        return diff, data_mean

    def corr(a, b, bsingle=None):
        """Pearson-correlate every grid point of yearly field `a` with the
        box-mean yearly series of `b` (or with `b` directly when `bsingle`
        is truthy).  Returns a Dataset with 'r', 'pval' and 'slope' maps."""
        ds = xr.Dataset()
        # NaN templates with the same lat/lon grid as `a`.
        ds['pval'] = a.copy(deep=True).sum('year') * np.nan
        ds['r'] = a.copy(deep=True).sum('year') * np.nan
        ds['slope'] = a.copy(deep=True).sum('year') * np.nan

        # Target box for the cloud series: lon_min, lon_max, lat_min, lat_max
        corr_box = [13, 25, -23, -10]

        if bsingle:
            bb = b
        else:
            bb = b.sel(latitude=slice(corr_box[2], corr_box[3]),
                       longitude=slice(corr_box[0], corr_box[1])).mean(dim=['latitude', 'longitude'])

        for lat in a.latitude.values:
            for lon in a.longitude.values:
                aa = a.sel(latitude=lat, longitude=lon)
                if bsingle:
                    r, p = stats.pearsonr(aa.values, bb)
                    pf = np.polyfit(aa.values, bb, 1)
                else:
                    r, p = stats.pearsonr(aa.values, bb.values)
                    pf = np.polyfit(aa.values, bb.values, 1)

                slope = pf[0]

                ds['r'].loc[{'latitude': lat, 'longitude': lon}] = r
                ds['pval'].loc[{'latitude': lat, 'longitude': lon}] = p
                ds['slope'].loc[{'latitude': lat, 'longitude': lon}] = slope

        return ds

    for m in months:

        # ERA predictors at the 12UTC synoptic hour; cloud counts at all hours.
        t2diff, t2year = array_juggling(t2, m, hour=12)
        qdiff, qyear = array_juggling(q, m, hour=12)
        shdiff, sheyear = array_juggling(shear, m, hour=12)
        tirdiff, tiryear = array_juggling(tir, m)  # average frequency change

        bs = False
        try:
            qcorr = corr(qdiff, tirdiff, bsingle=bs)
        except Exception:
            # Correlation can fail (e.g. all-NaN or empty month selection);
            # skip that month rather than abort the whole loop.  The original
            # used a bare `except:`, which would also swallow KeyboardInterrupt.
            continue
        shearcorr = corr(shdiff, tirdiff, bsingle=bs)
        tcorr = corr(t2diff, tirdiff, bsingle=bs)

        # NOTE: significance masking (p-value / FDR thresholding) is
        # intentionally disabled here; all r values are plotted.

        # Zero storm fraction means "no data" in these maps -> mask out.
        dicm[m].values[dicm[m].values == 0] = np.nan

        print('plot')
        fp = fpath + 'corr_box_SYNOP_SAWest_-50base' + str(m).zfill(2) + '.png'
        map = shear.salem.get_map()

        f = plt.figure(figsize=(13, 7), dpi=300)
        ax1 = f.add_subplot(221)

        map.set_data(tcorr['r'], interp='linear')
        contours = map.set_contour(t2year - 273.15, interp='linear', levels=np.arange(24, 37, 4), cmap='inferno')

        map.set_plot_params(cmap='RdBu_r', extend='both', levels=[-0.7, -0.6, -0.5, -0.4, 0.4, 0.5, 0.6, 0.7])
        map.visualize(ax=ax1, title='925hP temperature')

        ax2 = f.add_subplot(222)
        map.set_data(qcorr['r'], interp='linear')
        map.set_contour(qyear, interp='linear', levels=np.arange(5, 19, 3), cmap='inferno')

        map.set_plot_params(cmap='RdBu_r', extend='both', levels=[-0.7, -0.6, -0.5, -0.4, 0.4, 0.5, 0.6, 0.7])
        map.visualize(ax=ax2, title='925hPa Spec. humidity')

        ax3 = f.add_subplot(223)
        map.set_data(shearcorr['r'], interp='linear')
        map.set_plot_params(cmap='RdBu_r', extend='both', levels=[-0.7, -0.6, -0.5, -0.4, 0.4, 0.5, 0.6, 0.7])
        map.set_contour(sheyear, interp='linear', levels=np.arange(-10, 1, 3), cmap='inferno')
        map.visualize(ax=ax3, title='600-925hPa Zonal wind shear')

        ax4 = f.add_subplot(224)
        map.set_contour(dicmean[m], interp='linear', levels=[0.1, 0.5, 1, 2.5], cmap='inferno')
        map.set_data(dicm[m])

        map.set_plot_params(cmap='viridis', extend='both', levels=np.arange(10, 51, 10))
        map.visualize(ax=ax4, title='-70C cloud cover change', cbar_title='$\%$ decade-1')

        plt.tight_layout()
        plt.savefig(fp)
        plt.close('all')
Ejemplo n.º 22
0
def corr_box():
    """Correlate ERA5 predictor fields with -70C cloud-cover change over
    West Africa and plot a 4-panel regression/correlation map per month.

    NOTE(review): this redefines `corr_box` from the ERA-interim section
    above (the later definition wins at import time) — consider renaming.

    For each month in `months`, yearly (1983-2017, 12UTC) grids of 2m
    temperature, ~925hPa humidity and 650-925hPa wind shear are regressed
    on the box-mean GRIDSAT -70C cloud series; one PNG is written per
    month.  Side effects only; returns None.
    """
    srfc = cnst.ERA5_MONTHLY_SRFC_SYNOP
    pl = cnst.ERA5_MONTHLY_PL_SYNOP
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-70_monthly_mean_5000km2.nc'  # -70count

    fpath = cnst.network_data + 'figs/CLOVER/months/ERA5_WA/use/'

    # Pre-computed per-month storm-fraction change / mean maps (panel 4).
    # Context managers avoid the file-handle leak of pkl.load(open(...)).
    with open(cnst.network_data + 'data/CLOVER/saves/storm_frac_synop12UTC_WA.p', 'rb') as fo:
        dicm = pkl.load(fo)
    with open(cnst.network_data + 'data/CLOVER/saves/storm_frac_mean_synop12UTC_WA.p', 'rb') as fo:
        dicmean = pkl.load(fo)

    # Analysis domain: [lon_min, lon_max, lat_min, lat_max]
    box = [-18, 30, 0, 25]

    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)  # make latitude ascending so .sel slices work
    da = da.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2], box[3]))
    da2 = xr.open_dataset(srfc)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2], box[3]))
    da3 = xr.open_dataset(mcs)
    da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2], box[3]))

    low_press = 925   # low-level layer: 875-925hPa
    up_press = 650    # upper layer: 600-650hPa

    # All ERA fields restricted to the 12UTC synoptic hour.
    q = da['q'].sel(level=slice(low_press - 50, low_press)).mean('level')
    q = q[q['time.hour'] == 12]
    t2d = da2['t2m']
    t2d = t2d[t2d['time.hour'] == 12]

    u600 = da['u'].sel(level=slice(up_press - 50, up_press)).mean('level')
    u600 = u600[u600['time.hour'] == 12]
    v600 = da['v'].sel(level=slice(up_press - 50, up_press)).mean('level')
    v600 = v600[v600['time.hour'] == 12]

    u925 = da['u'].sel(level=slice(low_press - 50, low_press)).mean('level')
    u925 = u925[u925['time.hour'] == 12]
    v925 = da['v'].sel(level=slice(low_press - 50, low_press)).mean('level')
    v925 = v925[v925['time.hour'] == 12]

    # Shear between the upper and the low-level layer.  (The original code
    # recomputed the identical low-level layer as u800/v800; u925/v925 are
    # the same slice, so they are reused here.)
    shear_u = u600 - u925
    shear_v = v600 - v925

    ws_shear = u_met.u_v_to_ws_wd(shear_u.values, shear_v.values)

    # Wrap shear speed in a DataArray carrying t2d's coords/time axis.
    shear = t2d.copy(deep=True)
    shear.name = 'shear'
    shear.values = ws_shear[0]

    q.values = q.values * 1000  # kg/kg -> g/kg

    # Regrid GRIDSAT cloud means onto the ERA5 grid.
    tir = da3['tir']
    tir = t2d.salem.lookup_transform(tir)

    def array_juggling(data, month, hour=None):
        """Return (yearly means as DataArray[year, lat, lon], all-time mean)
        for `month` (a 1- or 2-element sequence; 2 elements select a
        wrap-around month range), restricted to 1983-2017 and optionally to
        one synoptic hour."""
        m = month

        if hour is not None:
            if len(month) > 1:
                # `|` (not `&`) so a range like (11, 1) wraps over new year.
                data = data[((data['time.month'] >= month[0]) | (data['time.month'] <= month[1])) & (
                            data['time.hour'] == hour) & (data['time.year'] >= 1983) & (data['time.year'] <= 2017)]
            else:
                data = data[
                    (data['time.month'] == month[0]) & (data['time.hour'] == hour) & (data['time.year'] >= 1983) & (
                                data['time.year'] <= 2017)]
        else:
            if len(month) > 1:
                data = data[((data['time.month'] >= month[0]) | (data['time.month'] <= month[1])) & (
                            data['time.year'] >= 1983) & (data['time.year'] <= 2017)]
            else:
                data = data[
                    (data['time.month'] == month[0]) & (data['time.year'] >= 1983) & (data['time.year'] <= 2017)]

        data_years = data.groupby('time.year').mean(axis=0)

        data_mean = data.mean(axis=0)

        # Re-wrap so the leading dim is 'year' rather than 'time'.
        diff = xr.DataArray(data_years.values, coords=[data_years.year, data.latitude, data.longitude],
                            dims=['year', 'latitude', 'longitude'])
        return diff, data_mean

    def corr(a, b, bsingle=None, c_box=None):
        """Linearly regress every grid point of yearly field `a` on the
        `c_box`-mean yearly series of `b` (or on `b` directly when
        `bsingle` is truthy).  Returns a Dataset with 'r', 'pval', 'slope'
        and 'intercept' maps."""
        ds = xr.Dataset()
        # Separate NaN templates per variable (a shared template would alias).
        ds['pval'] = a.copy(deep=True).sum('year') * np.nan
        ds['r'] = a.copy(deep=True).sum('year') * np.nan
        ds['slope'] = a.copy(deep=True).sum('year') * np.nan
        # BUGFIX: 'intercept' was written below without ever being
        # initialised, which raised a KeyError on the first grid point.
        ds['intercept'] = a.copy(deep=True).sum('year') * np.nan

        corr_box = c_box

        if bsingle:
            bb = b
        else:
            bb = b.sel(latitude=slice(corr_box[2], corr_box[3]),
                       longitude=slice(corr_box[0], corr_box[1])).mean(dim=['latitude', 'longitude'])

        for lat in a.latitude.values:
            for lon in a.longitude.values:
                aa = a.sel(latitude=lat, longitude=lon)
                if bsingle:
                    pf, intercept, r, p, std_err = stats.linregress(aa.values, bb)
                else:
                    pf, intercept, r, p, std_err = stats.linregress(aa.values, bb.values)

                slope = pf

                # Mask grid points with too many exact zeros (likely no data).
                if (np.nansum(aa.values == 0) >= 10):
                    p = np.nan
                    r = np.nan

                ds['r'].loc[{'latitude': lat, 'longitude': lon}] = r
                ds['pval'].loc[{'latitude': lat, 'longitude': lon}] = p
                ds['slope'].loc[{'latitude': lat, 'longitude': lon}] = slope
                ds['intercept'].loc[{'latitude': lat, 'longitude': lon}] = intercept

        return ds

    # Alternative per-month target boxes, kept for reference (c_box below is
    # currently hard-coded instead of box_dic[m]).
    box_dic = {
        1: [16, 30, -25, -20],
        2: [12, 28, -10, 3],
        3: [16, 25, -23, -18],
        4: [16, 25, -23, -18],
        10: [14, 28, -15, -5],
        11: [16, 30, -20, -10],
        12: [16, 30, -22, -12],
        (11, 1): [18, 30, -23, -18]
    }

    months = [3, 10]

    for m in months:

        c_box = [-12, 12, 4.5, 8.5]  # target box; see box_dic for alternatives

        if isinstance(m, int):
            m = [m]

        tirdiff, tiryear = array_juggling(tir, m)  # average frequency change

        t2diff, t2year = array_juggling(t2d, m)
        qdiff, qyear = array_juggling(q, m)
        shdiff, sheyear = array_juggling(shear, m)
        vdiff, vyear = array_juggling(v925, m)
        udiff, uyear = array_juggling(u925, m)

        bs = False
        try:
            qcorr = corr(qdiff, tirdiff, bsingle=bs, c_box=c_box)
        except Exception:
            # Regression can fail (e.g. all-NaN or empty month selection);
            # skip that month rather than abort the whole loop.  The original
            # used a bare `except:`, which would also swallow KeyboardInterrupt.
            continue
        shearcorr = corr(shdiff, tirdiff, bsingle=bs, c_box=c_box)
        tcorr = corr(t2diff, tirdiff, bsingle=bs, c_box=c_box)

        # Zero storm fraction means "no data" in these maps -> mask out.
        dicm[m[0]].values[dicm[m[0]].values == 0] = np.nan

        print('plot')

        if len(m) == 1:
            fp = fpath + 'ERA5_corr_WA_-70C_synop_linear_SLOPE_' + str(m[0]).zfill(2) + '.png'
        else:
            fp = fpath + 'ERA5_corr_WA_-70C_synop_linear_SLOPE_' + str(m[0]).zfill(2) + '-' + str(m[1]).zfill(2) + '.png'

        map = shear.salem.get_map()

        xx, yy = map.grid.transform(shear.longitude.values, shear.latitude.values,
                                    crs=shear.salem.grid.proj)

        xx, yy = np.meshgrid(xx, yy)

        # Quiver only every 2nd grid point.
        u = uyear.values[1::2, 1::2]
        v = vyear.values[1::2, 1::2]

        xx = xx[1::2, 1::2]
        yy = yy[1::2, 1::2]

        f = plt.figure(figsize=(15, 8), dpi=350)
        ax1 = f.add_subplot(221)
        # (A leftover ipdb.set_trace() breakpoint was removed here.)
        map.set_data(tcorr['intercept'] + tcorr['slope'], interp='linear')
        map.set_contour(t2year - 273.15, interp='linear', levels=np.arange(24, 37, 4), colors='k', linewidths=0.5)

        map.set_plot_params(cmap='RdBu_r', extend='both', levels=np.arange(-0.5, 0.51, 0.1))
        dic = map.visualize(ax=ax1, title='2m temperature corr. | contours: mean T', cbar_title='')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        ax2 = f.add_subplot(222)
        map.set_data(qcorr['slope'] * 10, interp='linear')
        map.set_contour(qyear, interp='linear', levels=np.arange(5, 19, 3), colors='k', linewidths=0.5)

        map.set_plot_params(cmap='RdBu_r', extend='both', levels=[-0.7, -0.6, -0.5, -0.4, -0.3, 0.3, 0.4, 0.5, 0.6, 0.7])
        dic = map.visualize(ax=ax2, title='925hPa Spec. humidity corr. | contours: mean q', cbar_title='')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        ax3 = f.add_subplot(223)
        map.set_data(shearcorr['slope'] * 10, interp='linear')
        map.set_plot_params(cmap='RdBu_r', extend='both', levels=[-0.7, -0.6, -0.5, -0.4, -0.3, 0.3, 0.4, 0.5, 0.6, 0.7])

        qu = ax3.quiver(xx, yy, u, v, scale=70, width=0.002)
        qk = plt.quiverkey(qu, 0.4, 0.03, 4, '4 m s$^{-1}$',
                           labelpos='E', coordinates='figure')

        # NOTE(review): no new contour is set for this panel, so the map
        # still holds the q contour from ax2 — confirm that is intended.
        dic = map.visualize(ax=ax3, title='650-925hPa wind shear corr., mean 925hPa wind vectors', cbar_title='')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        ax4 = f.add_subplot(224)
        map.set_contour(dicmean[m[0]], interp='linear', levels=[0.1, 0.5, 1, 2.5], colors='k', linewidths=0.5)

        map.set_data(dicm[m[0]])
        coord = c_box
        geom = shpg.box(coord[0], coord[2], coord[1], coord[3])
        map.set_geometry(geom, zorder=99, color='darkorange', linewidth=3, linestyle='--', alpha=0.1)

        map.set_plot_params(cmap='viridis', extend='both', levels=np.arange(10, 41, 10))
        dic = map.visualize(ax=ax4, title='-70C cloud cover change | >5000km2', cbar_title='$\%$ decade-1')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        plt.tight_layout()
        plt.savefig(fp)
        plt.close('all')
Ejemplo n.º 23
0
def trend_all():

    srfc = cnst.ERA5_MONTHLY_SRFC_SYNOP  #cnst.ERA_MONTHLY_SRFC_SYNOP
    pl = cnst.ERA5_MONTHLY_PL_SYNOP  #cnst.ERA_MONTHLY_PL_SYNOP
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-70_monthly_mean_5000km2.nc'  #gridsat_WA_-70_monthly_mean_5000km2.nc' #gridsat_WA_-50_monthly_count_-50base.nc' #gridsat_WA_-70_monthly_mean_5000km2.nc'  gridsat_WA_-50_monthly_count

    fpath = cnst.network_data + 'figs/CLOVER/months/ERA5_WA/'

    box = [-18, 30, 0, 25]  #[-18,30,0,25]#  [-18,40,0,25] #

    da = xr.open_dataset(pl)  #xr.open_dataset(pl)
    #da = xr.decode_cf(da)
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(box[0], box[1]),
                latitude=slice(box[2], box[3]))
    da2 = xr.open_dataset(srfc)  #xr.open_dataset(srfc)
    #da2 = xr.decode_cf(da2)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(box[0], box[1]),
                  latitude=slice(box[2], box[3]))
    da3 = xr.open_dataarray(mcs) * 100  #/30*100
    da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2], box[3]))
    #ipdb.set_trace()
    da = da.isel(time=(da['time.hour'] == 12))
    da2 = da2.isel(time=(da2['time.hour'] == 12))

    lons = da.longitude
    lats = da.latitude

    press = da2['tcwv']
    #press = press[press['time.hour'] == 12]
    #press.values = press.values#*1000
    low_press = 950
    up_press = 650
    mid_press = 700

    q = da['q'].sel(level=slice(low_press - 30, low_press)).mean('level')
    t2d = da2['t2m']

    theta_low = u_met.theta_e(
        low_press,
        da['t'].sel(level=slice(low_press -
                                30, low_press)).mean('level').values - 273.15,
        da['q'].sel(level=slice(low_press -
                                30, low_press)).mean('level').values)
    theta_high = u_met.theta_e(
        mid_press,
        da['t'].sel(level=slice(up_press, mid_press)).mean('level').values -
        273.15,
        da['q'].sel(level=slice(up_press, mid_press)).mean('level').values)
    theta_high_d = u_met.theta(
        mid_press,
        da['t'].sel(level=slice(up_press, mid_press)).mean('level').values -
        273.15)
    theta_low_d = u_met.theta(
        low_press,
        da['t'].sel(level=slice(low_press -
                                30, low_press)).mean('level').values - 273.15)

    # punit = units.Quantity(mid_press, 'hPa')
    # tunit = units.Quantity(da['t'].sel(level=slice(mid_press-30, mid_press)).mean('level').values, 'K')
    # theta_high_d = calc.saturation_equivalent_potential_temperature(punit,tunit)
    #
    # punit = units.Quantity(low_press, 'hPa')
    # tunit = units.Quantity(da['t'].sel(level=slice(low_press-30, low_press)).mean('level').values, 'K')
    # theta_low_d = calc.saturation_equivalent_potential_temperature(punit, tunit)

    theta_diff = (theta_high /
                  theta_low) * 100  #(np.array(theta_high)-273.15) #theta_low -
    theta_diff_d = da2[
        'cape']  ##np.array(theta_low_d) - np.array(theta_high_d)
    #
    theta_e = t2d.copy(deep=True)
    theta_e.name = 'theta'
    theta_e.values = theta_diff

    theta_e = da['r'].sel(level=slice(mid_press - 30, mid_press)).mean(
        'level')  #da2['cape']

    theta_d = t2d.copy(deep=True)
    theta_d.name = 'theta'
    theta_d.values = theta_diff_d

    u600 = da['u'].sel(level=slice(up_press, mid_press)).mean('level')
    v600 = da['v'].sel(level=slice(up_press, mid_press)).mean('level')
    ws600 = u_met.u_v_to_ws_wd(u600, v600)

    u800 = da['u'].sel(level=925)

    v800 = da['v'].sel(level=925)

    shear_u = u600 - u800
    shear_v = v600 - v800
    ws_shear = u_met.u_v_to_ws_wd(shear_u.values, shear_v.values)

    ws_600 = t2d.copy(deep=True)
    ws_600.name = 'ws'

    ws_600.values = ws600[0]

    shear = t2d.copy(deep=True)
    shear.name = 'shear'
    shear.values = ws_shear[0]

    u6 = shear_u
    v6 = shear_v

    q.values = q.values * 1000

    grid = t2d.salem.grid.regrid(factor=1)
    t2 = t2d  # grid.lookup_transform(t2d)
    tir = grid.lookup_transform(da3)  #t2d.salem.lookup_transform(da3['tir']) #

    grid = grid.to_dataset()
    tir = xr.DataArray(tir,
                       coords=[da3['time'], grid['y'], grid['x']],
                       dims=['time', 'latitude', 'longitude'])

    months = [
        1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12
    ]  #[3,4,5,6,9,10,11]#,4,5,6,9,10,11#,4,5,6,9,10,11,(3,5), (9,11)]#, 10,5,9]#[(12,2)]#[1,2,3,4,5,6,7,8,9,10,11,12]# #,2,3,11,12]#[(12,2)]#[1,2,3,4,5,6,7,8,9,10,11,12]# #,2,3,11,12]

    dicm = {}
    dicmean = {}

    for m in months:
        method = 'polyfit'

        if type(m) == int:
            m = [m]

        sig = True

        t2trend, t2mean = calc_trend(t2,
                                     m,
                                     method=method,
                                     sig=sig,
                                     hour=12,
                                     wilks=False)  #hour=12,
        t2_mean = t2mean.mean(axis=0)

        tirtrend, tirmean = calc_trend(tir,
                                       m,
                                       method=method,
                                       sig=True,
                                       wilks=False)

        tirm_mean = tirmean.mean(axis=0)

        qtrend, qmean = calc_trend(q,
                                   m,
                                   method=method,
                                   sig=sig,
                                   hour=12,
                                   wilks=False)  #hour=12,
        q_mean = qmean.mean(axis=0)

        sheartrend, shearmean = calc_trend(shear,
                                           m,
                                           method=method,
                                           sig=sig,
                                           hour=12,
                                           wilks=False)  #hour=12,
        shear_mean = shearmean.mean(axis=0)

        presstrend, pressmean = calc_trend(press,
                                           m,
                                           method=method,
                                           sig=sig,
                                           hour=12,
                                           wilks=False)  #hour=12,
        press_mean = pressmean.mean(axis=0)

        u6trend, u6mean = calc_trend(u6,
                                     m,
                                     method=method,
                                     sig=sig,
                                     hour=12,
                                     wilks=False)  #hour=12,
        u6_mean = u6mean.mean(axis=0)
        v6trend, v6mean = calc_trend(v6,
                                     m,
                                     method=method,
                                     sig=sig,
                                     hour=12,
                                     wilks=False)  #hour=12,
        v6_mean = v6mean.mean(axis=0)

        u8trend, u8mean = calc_trend(u800,
                                     m,
                                     method=method,
                                     sig=sig,
                                     hour=12,
                                     wilks=False)  #hour=12,
        u8_mean = u8mean.mean(axis=0)
        v8trend, v8mean = calc_trend(v800,
                                     m,
                                     method=method,
                                     sig=sig,
                                     hour=12,
                                     wilks=False)  #hour=12,
        v8_mean = v8mean.mean(axis=0)

        aej = np.argmin(u6_mean, axis=0)
        itd = np.argmin(np.abs(v8_mean.values), axis=0)

        thetatrend, thetamean = calc_trend(theta_e,
                                           m,
                                           method=method,
                                           sig=sig,
                                           hour=12,
                                           wilks=False)  #hour=12,
        theta_mean = thetamean.mean(axis=0)

        thetatrend_d, thetamean_d = calc_trend(theta_d,
                                               m,
                                               method=method,
                                               sig=sig,
                                               hour=12,
                                               wilks=False)  #hour=12,
        thetad_mean = thetamean_d.mean(axis=0)

        t_da = t2trend * 10.  # warming over decade
        q_da = qtrend * 10.  # warming over decade
        s_da = sheartrend * 10.  # warming over decade
        u6trend = u6trend * 10
        v6trend = v6trend * 10
        tcwv_da = presstrend * 10
        theta_da = thetatrend * 10
        thetad_da = thetatrend_d * 10
        u8trend = u8trend * 10
        v8trend = v8trend * 10

        tdata = (tirtrend.values * 10. / tirm_mean.values) * 100.
        #ipdb.set_trace()
        tirtrend_out = xr.DataArray(tdata,
                                    coords=[grid['y'], grid['x']],
                                    dims=['latitude', 'longitude'])
        tirtrend_out.name = 'tir'
        #tirmean_out = xr.DataArray(tirm_mean, coords=[grid['y'], grid['x']], dims=['latitude','longitude'])

        dicm[m[0]] = tirtrend_out
        dicmean[m[0]] = tirm_mean

        if len(m) == 1:
            fp = fpath + 'use/ERA5_-70_use_nosig_2003_' + str(
                m[0]).zfill(2) + '.png'
        else:
            fp = fpath + 'use/ERA5_-70_use_nosig_2003_' + str(
                m[0]).zfill(2) + '-' + str(m[1]).zfill(2) + '.png'
        map = shear.salem.get_map(countries=False)
        # Change the country borders
        map.set_shapefile(countries=True, color='grey', linewidths=0.5)
        #map.set_lonlat_contours(interval=0)
        # Change the lon-lat countour setting
        map.set_lonlat_contours(add_ytick_labels=True,
                                interval=5,
                                linewidths=0.01,
                                linestyles='-',
                                colors='white')

        ti_da = t2d.salem.transform(tirtrend_out)

        f = plt.figure(figsize=(15, 8), dpi=300)

        # transform their coordinates to the map reference system and plot the arrows
        xx, yy = map.grid.transform(shear.longitude.values,
                                    shear.latitude.values,
                                    crs=shear.salem.grid.proj)

        xaej, yaej = map.grid.transform(u6_mean.longitude.values,
                                        u6_mean.latitude.values[aej.values],
                                        crs=shear.salem.grid.proj)

        xitd, yitd = map.grid.transform(v8_mean.longitude.values,
                                        v8_mean.latitude.values[itd],
                                        crs=shear.salem.grid.proj)

        xx, yy = np.meshgrid(xx, yy)

        #ipdb.set_trace()
        #Quiver only every 7th grid point
        u = u6trend.values[1::2, 1::2]
        v = v6trend.values[1::2, 1::2]

        #Quiver only every 7th grid point
        uu = u8trend.values[1::2, 1::2]
        vv = v8trend.values[1::2, 1::2]

        #Quiver only every 7th grid point
        um = u8_mean.values[1::2, 1::2]
        vm = v8_mean.values[1::2, 1::2]

        xx = xx[1::2, 1::2]
        yy = yy[1::2, 1::2]

        # pdic = {
        #     'tlin' : (t2_mean.values-273.15).astype(np.float64),
        #     'tmean' : (t2_mean.values-273.15).astype(np.float64),
        #     'qmean' : (q_mean.values).astype(np.float64),
        #     'qlin'  : q_da.values,
        #     'shearlin' : s_da.values,
        #     'u' : u,
        #     'v' : v,
        #     'xx' : xx,
        #     'yy' : yy,
        #     'tirmean' : tirm_mean,
        #
        #
        # }

        # pkl.dump(dicm,
        #          open(cnst.network_data + 'data/CLOVER/saves/storm_frac_synop12UTC_WA.p',
        #               'wb'))

        map.set_shapefile(countries=True, linewidths=1.2, color='grey')

        ax1 = f.add_subplot(221)
        map.set_data(t_da.values, interp='linear')  # interp='linear'

        map.set_contour(s_da.values,
                        interp='linear',
                        levels=[0.4, 0.6, 0.8],
                        colors='k',
                        linewidths=1.8)
        map.set_plot_params(
            levels=[-0.5, -0.4, -0.3, -0.2, 0.2, 0.3, 0.4, 0.5],
            cmap='RdBu_r',
            extend='both')  # levels=np.arange(-0.5,0.51,0.1),
        qu = ax1.quiver(xx, yy, u, v, scale=30, width=0.002, headwidth=4)

        # qk = plt.quiverkey(qu, 0.4, 0.03, 1, '1 m s$^{-1}$decade$^{-1}$',
        #                    labelpos='E', coordinates='figure')

        #map.set_contour((t2_mean.values).astype(np.float64), interp='linear', colors='k', linewidths=0.5, levels=np.linspace(800,925,8))
        #map.set_plot_params(levels=[-0.5,-0.4,-0.3,-0.2,-0.1,-0.05,-0.02, 0.02,0.05,0.1,0.2,0.3,0.4,0.5], cmap='RdBu_r', extend='both')  # levels=np.arange(-0.5,0.51,0.1),

        dic = map.visualize(
            ax=ax1,
            title=
            '2m temperature | 925-600hPa wind shear | 650hPa wind vectors',
            cbar_title=r'K decade$^{-1}$')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=9, fmt='%1.1f')
        qk = plt.quiverkey(qu,
                           0.45,
                           0.52,
                           1,
                           '1 m s$^{-1}$decade$^{-1}$',
                           labelpos='E',
                           coordinates='figure')

        ax2 = f.add_subplot(222)
        map.set_data(theta_da.values - 0.2, interp='linear')  # interp='linear'
        map.set_contour(
            (q_da.values).astype(np.float64),
            interp='linear',
            colors='k',
            linewidths=1.8,
            levels=[
                -0.6, -0.4, -0.2, 0.2, 0.4, 0.6
            ])  #[6,8,10,12,14,16] #levels=[-0.6,-0.4,-0.2,0.2,0.4, 0.6],
        map.set_plot_params(
            levels=np.array([-0.4, -0.3, -0.2, -0.1, 0.1, 0.2, 0.3, 0.4]) * 10,
            cmap='RdBu',
            extend='both'
        )  # levels=np.arange(-0.5,0.51,0.1), [-0.6,-0.4,-0.2,0.2,0.4,0.6]

        qu = ax2.quiver(xx, yy, um, vm, scale=100, width=0.002, headwidth=4)
        qk = plt.quiverkey(qu,
                           0.94,
                           0.52,
                           3,
                           '3 m s$^{-1}$',
                           labelpos='E',
                           coordinates='figure')

        dic = map.visualize(
            ax=ax2,
            title=r'650hPa RH | 925hPa q | 925hPa wind vectors',
            cbar_title=r'% decade$^{-1}$')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=9, fmt='%1.1f')

        ax3 = f.add_subplot(223)
        map.set_data(tcwv_da.values - 0.05, interp='linear')  # interp='linear'
        map.set_contour(thetad_da.values,
                        interp='linear',
                        levels=np.array([-2, -1.5, -1, -0.5, 0.5, 1, 1.5, 2]) *
                        100,
                        colors='k',
                        linewidths=1.8)

        map.set_plot_params(levels=[
            -1.5, -1, -0.8, -0.6, -0.4, -0.2, 0.2, 0.4, 0.6, 0.8, 1, 1.5
        ],
                            cmap='RdBu',
                            extend='both')  # levels=np.arange(-0.5,0.51,0.1)

        qu = ax3.quiver(xx, yy, uu, vv, scale=30, width=0.002, headwidth=4)

        qk = plt.quiverkey(qu,
                           0.45,
                           0.03,
                           1,
                           '1 m s$^{-1}$decade$^{-1}$',
                           labelpos='E',
                           coordinates='figure')

        dic = map.visualize(ax=ax3,
                            title=r'TCWV | CAPE | 925hPa wind vectors',
                            cbar_title=r'kg m$^{-2}$ decade$^{-1}$')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=9, fmt='%1.0f')

        ax4 = f.add_subplot(224)
        map.set_contour((tirm_mean),
                        interp='linear',
                        levels=[0.1, 1, 2, 4],
                        colors='k',
                        linewidths=1.5)

        ti_da.values[ti_da.values == 0] = np.nan
        map.set_data(ti_da)  #
        coord = [18, 25, -28, -20]
        geom = shpg.box(coord[0], coord[2], coord[1], coord[3])
        #map.set_geometry(geom, zorder=99, color='darkorange', linewidth=3, linestyle='--', alpha=0.3)

        map.set_plot_params(cmap='viridis',
                            extend='both',
                            levels=np.arange(10, 41,
                                             10))  # levels=np.arange(10,51,10)

        ax4.scatter(xaej, yaej, color='r', s=50, edgecolors='r', linewidths=1)

        #ax4.scatter(xitd, yitd, color='r', s=50, edgecolors='k', linewidths=1)

        dic = map.visualize(ax=ax4,
                            title='-70$^{\circ}$C cloud cover change ',
                            cbar_title='$\%$ decade$^{-1}$')
        contours = dic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=9, fmt='%1.1f')

        plt.tight_layout()

        plt.annotate('a)',
                     xy=(0.02, 0.96),
                     xytext=(0, 4),
                     size=13,
                     xycoords=('figure fraction', 'figure fraction'),
                     textcoords='offset points')
        plt.annotate('b)',
                     xy=(0.49, 0.96),
                     xytext=(0, 4),
                     size=13,
                     xycoords=('figure fraction', 'figure fraction'),
                     textcoords='offset points')
        plt.annotate('c)',
                     xy=(0.02, 0.48),
                     xytext=(0, 4),
                     size=13,
                     xycoords=('figure fraction', 'figure fraction'),
                     textcoords='offset points')
        plt.annotate('d)',
                     xy=(0.49, 0.48),
                     xytext=(0, 4),
                     size=13,
                     xycoords=('figure fraction', 'figure fraction'),
                     textcoords='offset points')

        plt.savefig(fp)
        plt.close('all')
Ejemplo n.º 24
0
def trend_all_polyfit():
    """Compute decadal linear trends of monthly ERA fields and -70C cloud
    cover over West Africa and save one 4-panel trend map per month.

    Fields: 925 hPa temperature and specific humidity, 650-925 hPa zonal
    wind shear, and GRIDSAT -70C monthly cloud counts. All fields are
    coarsened to a half-resolution grid before per-pixel trend fitting.

    Reads paths from ``cnst``; writes PNG figures to a hard-coded
    directory. Returns nothing.
    """
    srfc = cnst.ERA_MONTHLY_SRFC
    pl = cnst.ERA_MONTHLY_PL
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-70_monthly_count.nc'

    fpath = '/users/global/cornkle/figs/CLOVER/months/'

    # Analysis domain as [lonW, lonE, latS, latN] (West Africa).
    box = [-18, 40, 0, 30]

    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)  # make latitude ascending so slicing works
    da = da.sel(longitude=slice(box[0], box[1]),
                latitude=slice(box[2], box[3]))
    da2 = xr.open_dataset(srfc)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(box[0], box[1]),
                  latitude=slice(box[2], box[3]))
    da3 = xr.open_dataset(mcs)
    da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2], box[3]))

    lons = da.longitude
    lats = da.latitude

    q = da['q'].sel(level=925)
    qdry = da['q'].sel(level=650)
    t2d = da['t'].sel(level=925)

    u925 = da['u'].sel(level=925)
    u600 = da['u'].sel(level=650)

    shear = u600 - u925  # zonal wind shear between 650 and 925 hPa

    q.values = q.values * 1000  # kg/kg -> g/kg

    # Coarsen everything onto a half-resolution grid before trend fitting.
    grid = t2d.salem.grid.regrid(factor=0.5)
    t2 = grid.lookup_transform(t2d)
    tir = grid.lookup_transform(da3['tir'])
    q = grid.lookup_transform(q)
    # BUGFIX: was grid.lookup_transform(q), which regridded q a second time
    # and never transformed the 650 hPa humidity; use qdry as intended.
    qdry = grid.lookup_transform(qdry)
    shear = grid.lookup_transform(shear)

    grid = grid.to_dataset()

    # Rebuild labelled DataArrays on the coarse grid.
    t2 = xr.DataArray(t2,
                      coords=[t2d['time'], grid['y'], grid['x']],
                      dims=['time', 'latitude', 'longitude'])
    q = xr.DataArray(q,
                     coords=[t2d['time'], grid['y'], grid['x']],
                     dims=['time', 'latitude', 'longitude'])
    tir = xr.DataArray(tir,
                       coords=[da3['time'], grid['y'], grid['x']],
                       dims=['time', 'latitude', 'longitude'])
    shear = xr.DataArray(shear,
                         coords=[t2d['time'], grid['y'], grid['x']],
                         dims=['time', 'latitude', 'longitude'])

    months = np.arange(1, 13)

    def array_juggling(data, month, hour=None):
        """Return (per-pixel trend slope map, annual means) for `month`.

        Selects the given calendar month (optionally also a single hour),
        averages to annual means, then fits u_darrays.linear_trend at
        every grid point.
        """
        m = month

        if hour is not None:  # idiom fix: compare to None with `is`
            data = data[(data['time.month'] == m)
                        & (data['time.hour'] == hour)]
        else:
            data = data[(data['time.month'] == m)]

        data_years = data.groupby('time.year').mean(axis=0)

        # Stack lat and lon into a single dimension called allpoints.
        datastacked = data_years.stack(allpoints=['latitude', 'longitude'])

        # Apply the trend function over allpoints (per-pixel regression).
        print('Entering trend calc')
        dtrend = datastacked.groupby('allpoints').apply(
            u_darrays.linear_trend)
        ddtrend = dtrend['slope']

        # Unstack back to lat/lon coordinates.
        return ddtrend.unstack('allpoints'), data_years

    for m in months:
        tirtrend, dyears = array_juggling(tir, m)
        tirm_mean = dyears.mean(axis=0)  # monthly cloud climatology
        t2trend, dummy = array_juggling(t2, m)

        qtrend, dummy = array_juggling(q, m)

        sheartrend, dummy = array_juggling(shear, m)

        # Slopes are per year; scale to per decade.
        t2trend_unstacked = t2trend * 10.
        qtrend_unstacked = qtrend * 10.
        sheartrend_unstacked = sheartrend * 10.
        # Cloud trend expressed as % of the monthly climatology per decade.
        tirtrend_unstacked = (
            (tirtrend.values) * 10.) * 100. / tirm_mean.values

        t_da = t2trend_unstacked
        q_da = qtrend_unstacked
        s_da = sheartrend_unstacked
        ti_da = tirtrend_unstacked

        fp = fpath + 'ttrend_linear_-70C_coarse_u_srfc' + str(m).zfill(
            2) + '.png'
        smap = shear.salem.get_map()  # renamed from `map` (shadowed builtin)

        f = plt.figure(figsize=(8, 5), dpi=300)
        ax1 = f.add_subplot(221)

        smap.set_plot_params()

        smap.set_data(t_da)
        smap.set_plot_params(levels=np.arange(-0.5, 0.51, 0.1),
                             cmap='RdBu_r',
                             extend='both')
        smap.visualize(ax=ax1, title='t2')

        ax2 = f.add_subplot(222)
        smap.set_data(q_da)
        smap.set_plot_params(levels=np.arange(-0.5, 0.51, 0.1),
                             cmap='RdBu',
                             extend='both')
        smap.visualize(ax=ax2, title='q')

        ax3 = f.add_subplot(223)
        smap.set_data(s_da)
        smap.set_plot_params(levels=np.arange(-1, 1.1, 0.2),
                             cmap='RdBu',
                             extend='both')
        smap.visualize(ax=ax3, title='u-shear')

        ax4 = f.add_subplot(224)
        smap.set_data(ti_da)
        smap.set_plot_params(cmap='Blues',
                             extend='both')
        smap.visualize(ax=ax4, title='-70C frequency')

        plt.savefig(fp)
        plt.close('all')
Ejemplo n.º 25
0
def file_loop(f):
    """Extract storm diagnostics and co-located monthly ERA5 environment
    values for one MCS cutout file.

    Parameters
    ----------
    f : str
        Path to a storm netCDF file with 'tc_lag0' (cloud-top T, degC),
        'p' (rainfall) and lon/lat/time coordinates.

    Returns
    -------
    dict or None
        A filled `dictionary()` of storm/environment metrics, or None when
        the storm is too warm, outside the analysis box, too small, or the
        ERA5 data cannot be read or matched.
    """
    print('Doing file: ' + f)

    dic = xr.open_dataset(f)
    edate = pd.Timestamp(dic.time.values)

    # Require genuinely cold cloud tops.
    if np.nanmin(dic['tc_lag0'].values) > -53:
        return

    outt = dic['tc_lag0'].values
    outp = dic['p'].values

    tminpos = np.where(dic['tc_lag0'].values == np.nanmin(
        dic['tc_lag0'].values))  # era position close to min temp
    if len(tminpos[0]) > 1:
        # Tie-break equally cold pixels by maximum rainfall.
        ptmax = np.nanmax((dic['p'].values)[tminpos])
        if ptmax > 0:
            prpos = np.where((dic['p'].values)[tminpos] == ptmax)
            tminpos = ((tminpos[0])[prpos], (tminpos[1])[prpos])
        else:
            tminpos = ((tminpos[0])[0], (tminpos[1])[0])

    elon = dic['lon'].values[tminpos]
    elat = dic['lat'].values[tminpos]

    out = dictionary()
    out['lon'] = dic['lon'].values
    out['lat'] = dic['lat'].values
    out['hour'] = dic['time.hour'].item()
    out['month'] = dic['time.month'].item()
    out['year'] = dic['time.year'].item()
    out['date'] = dic['time'].values

    # Storm centroid: centre of the cutout bounding box.
    out['clat'] = np.min(out['lat']) + (
        (np.max(out['lat']) - np.min(out['lat'])) * 0.5)
    out['clon'] = np.min(out['lon']) + (
        (np.max(out['lon']) - np.min(out['lon'])) * 0.5)

    # Keep only storms inside the West African analysis box.
    if (out['clat'] < 9) | (out['clon'] < -15) | (out['clon'] > 15):
        print('MCS out of box')
        return

    try:
        era_pl = xr.open_dataset(
            '/home/ck/DIR/mymachine/ERA5/monthly/synoptic/pl_1979-2019_monthly_synop_07x07.nc'
        )
    except Exception:  # narrowed from bare except: keep KeyboardInterrupt etc.
        print('ERA5 pl missing')
        return
    try:
        era_srfc = xr.open_dataset(
            '/home/ck/DIR/mymachine/ERA5/monthly/synoptic/srfc_1979-2019_monthly_synop_07x07.nc'
        )
    except Exception:
        print('ERA5 srfc missing')
        return
    era_pl = uda.flip_lat(era_pl)
    era_srfc = uda.flip_lat(era_srfc)

    # Match the monthly ERA5 field: first day of the month, 12 UTC.
    edate = edate.replace(hour=12, minute=0, day=1)

    era_pl_day = era_pl.sel(time=edate,
                            longitude=slice(-16, 17),
                            latitude=slice(4, 26))
    era_srfc_day = era_srfc.sel(time=edate,
                                longitude=slice(-16, 17),
                                latitude=slice(4, 26))

    try:
        era_day = era_pl_day.sel(latitude=elat,
                                 longitude=elon,
                                 method='nearest',
                                 tolerance=0.7)  # take point of minimum T
    except Exception:  # nearest-match outside tolerance raises KeyError
        return
    era_day_srfc = era_srfc_day.sel(latitude=elat,
                                    longitude=elon,
                                    method='nearest',
                                    tolerance=0.7)  # take point of minimum T

    del era_srfc_day

    # Point/layer means at the key pressure levels.
    e925 = era_day.sel(level=925).mean()

    e850 = era_pl_day['t'].sel(level=850)
    elow = era_day.sel(level=slice(925, 850)).mean('level').mean()
    e650 = era_day.sel(level=650).mean()
    emid = era_day.sel(level=slice(600, 700)).mean('level').mean()
    srfc = era_day_srfc.mean()

    t_thresh = -50  # -40C ~ 167 W m-2
    mask = np.isfinite(outp) & (outt <= t_thresh) & np.isfinite(outt)
    mask_area = (outt <= t_thresh) & np.isfinite(outt)
    mask70 = (outt <= -70) & np.isfinite(outt)

    # Require at least 3 valid cold/rainy pixels.
    if np.sum(mask) < 3:
        return

    print(
        np.nanmax(outt[mask])
    )  # can be bigger than cutout threshold because of interpolation to 5km grid after cutout

    out['area'] = np.sum(mask_area)
    out['area70'] = np.sum(mask70)

    # NOTE: recomputes the same centroid as above; kept for key consistency.
    out['clat'] = np.min(out['lat']) + (
        (np.max(out['lat']) - np.min(out['lat'])) * 0.5)
    out['clon'] = np.min(out['lon']) + (
        (np.max(out['lon']) - np.min(out['lon'])) * 0.5)

    out['tmin'] = np.min(outt[mask])
    out['tmean'] = np.mean(outt[mask])

    # Rain maximum: mean over a small kernel around the wettest pixel.
    maxpos = np.unravel_index(np.nanargmax(outp), outp.shape)
    out['pmax'] = np.nanmean(ua.cut_kernel(outp, maxpos[1], maxpos[0], 1))
    out['pmean'] = np.mean(outp[mask])

    # Meridional temperature gradient across the storm's longitude span.
    dbox = e850.copy(deep=True)
    minlon = era_pl_day.sel(latitude=8,
                            longitude=np.min(out['lon']),
                            method='nearest')
    maxlon = era_pl_day.sel(latitude=8,
                            longitude=np.max(out['lon']),
                            method='nearest')

    del era_pl_day

    tgrad = dbox.sel(longitude=slice(
        minlon.longitude.values, maxlon.longitude.values)).mean('longitude')

    # Fit the slope between the latitudes of the T minimum and maximum.
    tmin = np.nanargmin(tgrad.values)
    tmax = np.nanargmax(tgrad.values)
    tgrad = tgrad.isel(latitude=slice(tmin, tmax))

    lingress = uda.linear_trend_lingress(tgrad)

    out['Tgrad'] = lingress['slope'].values

    # Simple north-box minus south-box 850 hPa temperature difference.
    tgrad2 = dbox.sel(longitude=slice(np.min(out['lon']), np.max(out['lon'])), latitude=slice(10, 20)).mean(
        ['longitude', 'latitude']) - \
             dbox.sel(longitude=slice(np.min(out['lon']), np.max(out['lon'])), latitude=slice(5, 7)).mean(['longitude', 'latitude'])
    out['Tgradbox'] = tgrad2.values

    try:
        out['q925'] = float(e925['q'])
    except TypeError:  # multi-valued selection: skip ambiguous match
        return

    out['q650'] = float(e650['q'])
    out['v925'] = float(e925['v'])
    # BUGFIX: v650 previously stored e925['v'] (copy-paste error); use the
    # 650 hPa meridional wind like every other *650 key.
    out['v650'] = float(e650['v'])
    out['u925'] = float(e925['u'])
    out['u650'] = float(e650['u'])
    out['w925'] = float(e925['w'])
    out['w650'] = float(e650['w'])
    out['rh925'] = float(e925['r'])
    out['rh650'] = float(e650['r'])
    out['t925'] = float(e925['t'])
    out['t650'] = float(e650['t'])
    out['div925'] = float(e925['d'])
    out['div650'] = float(e650['d'])
    out['q_low'] = float(elow['q'])
    out['q_mid'] = float(emid['q'])
    out['tcwv'] = float(srfc['tcwv'])

    out['shear'] = float(e650['u'] - e925['u'])

    # Equivalent potential temperature contrast (instability proxy).
    theta_down = u_met.theta_e(925, e925['t'] - 273.15, e925['q'])
    theta_up = u_met.theta_e(650, e650['t'] - 273.15, e650['q'])

    out['dtheta'] = (theta_down - theta_up).values
    out['thetaup'] = theta_up.values
    out['thetadown'] = theta_down.values

    out['pgt30'] = np.sum(outp[mask] >= 30)
    out['isvalid'] = np.sum(mask)
    out['pgt01'] = np.sum(outp[mask] >= 0.1)

    out['p'] = outp[mask]
    out['t'] = outt[mask]
    dic.close()

    # Release the large ERA5 objects before returning.
    del era_day
    del era_day_srfc
    del era_pl
    del era_srfc

    return out
Ejemplo n.º 26
0
def corr_box():
    """Correlate interannual -70C cloud-cover variability over the Guinea
    coast box with local ERA fields and plot monthly correlation maps.

    For each selected month, the box-mean (4-8N, 10W-11E) yearly cloud
    series is Pearson-correlated with 925 hPa temperature, low-level
    humidity and 925 hPa meridional wind at every grid point; panels are
    masked at p > 0.01. The fourth panel shows pre-computed storm
    fractions loaded from a pickle. Saves one PNG per month; returns
    nothing.
    """
    srfc = cnst.ERA_MONTHLY_SRFC_SYNOP
    pl = cnst.ERA_MONTHLY_PL_SYNOP
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-70_monthly_count.nc'

    fpath = cnst.network_data + 'figs/CLOVER/months/'

    dicm = pkl.load(
        open(cnst.network_data + 'data/CLOVER/saves/storm_frac.p', 'rb'))

    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)  # latitude ascending for slicing
    da = da.sel(longitude=slice(-18, 40),
                latitude=slice(0, 25))
    da2 = xr.open_dataset(srfc)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(-18, 40),
                  latitude=slice(0, 25))
    da3 = xr.open_dataset(mcs)
    da3 = da3.sel(lon=slice(-18, 40), lat=slice(0, 25))

    lons = da.longitude
    lats = da.latitude

    q = da['q'].sel(level=slice(850, 925)).mean(dim='level')
    t2 = da['t'].sel(level=925)

    u925 = da['v'].sel(level=925)  # NOTE: 925 hPa *meridional* wind (v)
    u600 = da['u'].sel(level=slice(600, 650)).mean(dim='level')

    # The "shear" panel currently shows the 925 hPa v-wind, not a shear.
    shear = u925

    q.values = q.values * 1000  # kg/kg -> g/kg

    tir = da3['tir']
    tir = t2.salem.lookup_transform(tir)  # put cloud counts on the ERA grid

    months = np.arange(1, 13)
    months = [3, 4, 5, 6, 10]  # selected months only

    def array_juggling(data, month, hour=None):
        """Return (yearly means 1983-2017 as DataArray, time mean)."""
        m = month

        if hour is not None:  # idiom fix: was `hour != None`
            data = data[(data['time.month'] == m) & (data['time.hour'] == hour)
                        & (data['time.year'] >= 1983) &
                        (data['time.year'] <= 2017)]
        else:
            data = data[(data['time.month'] == m) & (data['time.year'] >= 1983)
                        & (data['time.year'] <= 2017)]
        data_years = data.groupby('time.year').mean(axis=0)

        data_mean = data.mean(axis=0)

        diff = xr.DataArray(
            data_years.values,
            coords=[data_years.year, data.latitude, data.longitude],
            dims=['year', 'latitude', 'longitude'])
        return diff, data_mean

    def corr(a, b, single=None):
        """Correlate the box mean of `a` with `b` point by point.

        Returns a Dataset with 'r', 'pval' and 'slope' maps. With
        `single`, `b` is a single 1-D series used at every grid point.
        """
        ds = xr.Dataset()
        ds['pval'] = a.copy(deep=True).sum('year') * np.nan
        ds['r'] = a.copy(deep=True).sum('year') * np.nan
        ds['slope'] = a.copy(deep=True).sum('year') * np.nan

        # Box-mean yearly series over the Guinea coast (4-8N, 10W-11E).
        aa = a.sel(latitude=slice(4, 8),
                   longitude=slice(-10,
                                   11)).mean(dim=['latitude', 'longitude'])

        for lat in a.latitude.values:
            for lon in a.longitude.values:

                if single:
                    bb = b
                    r, p = stats.pearsonr(bb, aa.values)
                    pf = np.polyfit(bb, aa.values, 1)
                else:
                    bb = b.sel(latitude=lat, longitude=lon)
                    r, p = stats.pearsonr(aa.values, bb.values)
                    pf = np.polyfit(aa.values, bb.values, 1)
                slope = pf[0]

                ds['r'].loc[{'latitude': lat, 'longitude': lon}] = r
                ds['pval'].loc[{'latitude': lat, 'longitude': lon}] = p
                ds['slope'].loc[{'latitude': lat, 'longitude': lon}] = slope

        return ds

    for m in months:
        t2diff, t2year = array_juggling(t2, m, hour=12)
        qdiff, qyear = array_juggling(q, m, hour=12)
        shdiff, sheyear = array_juggling(shear, m, hour=12)
        tirdiff, tiryear = array_juggling(tir, m)

        qcorr = corr(tirdiff, qdiff)
        shearcorr = corr(tirdiff, shdiff)
        tcorr = corr(tirdiff, t2diff)
        # Correlation with the year index itself = cloud trend proxy.
        cloud = corr(tirdiff, np.arange(len(tirdiff)), single=True)

        # Mask correlations not significant at the 1% level.
        pthresh = 0.01
        qcorr['r'].values[qcorr['pval'].values > pthresh] = np.nan
        shearcorr['r'].values[shearcorr['pval'].values > pthresh] = np.nan
        tcorr['r'].values[tcorr['pval'].values > pthresh] = np.nan

        # Cloud trend in % of climatology per decade (currently unused).
        tplot = ((cloud['slope'].values) * 10. / tiryear.values) * 100.

        fp = fpath + 'corr_synop_v_srfc' + str(m).zfill(2) + '.png'
        smap = shear.salem.get_map()  # renamed from `map` (shadowed builtin)

        f = plt.figure(figsize=(8, 5), dpi=300)
        ax1 = f.add_subplot(221)

        smap.set_data(tcorr['r'].values, interp='linear')
        smap.set_plot_params(cmap='RdBu_r',
                             extend='both',
                             levels=np.arange(-0.7, 0.71, 0.1))
        smap.visualize(ax=ax1, title='t2')

        ax2 = f.add_subplot(222)
        smap.set_data(qcorr['r'], interp='linear')
        smap.set_plot_params(cmap='RdBu_r',
                             extend='both',
                             levels=np.arange(-0.7, 0.71, 0.1))
        smap.visualize(ax=ax2, title='q')

        ax3 = f.add_subplot(223)
        smap.set_data(shearcorr['r'], interp='linear')
        smap.set_plot_params(cmap='RdBu_r',
                             extend='both',
                             levels=np.arange(-0.7, 0.71, 0.1))
        smap.visualize(ax=ax3, title='u-shear')

        ax4 = f.add_subplot(224)
        smap.set_data(dicm[m])  # pre-computed storm fraction for month m
        smap.set_plot_params(cmap='Blues',
                             extend='both',
                             levels=np.arange(0.1, 0.8, 0.1))
        smap.visualize(ax=ax4, title='-70C frequency')

        plt.tight_layout()
        plt.savefig(fp)
        plt.close('all')
Ejemplo n.º 27
0
def tgrad_shear_trend():
    """Plot interannual time series of 600-925 hPa zonal wind shear against
    box-mean MCS cloud-top temperature for selected months, with linear
    trend lines and year-to-year-difference correlations in the title.

    Reads monthly ERA surface/pressure-level files and a pre-aggregated
    MCS temperature series via ``cnst``; saves one PNG per month under
    figs/CLOVER/. Returns nothing.
    """

    srfc = cnst.ERA_MONTHLY_SRFC
    pl = cnst.ERA_MONTHLY_PL
    mcs = cnst.GRIDSAT + 'aggs/box_13W-13E-4-8N_meanT-50_from5000km2.nc'
    out = cnst.network_data + 'figs/CLOVER/'

    # Boxes as [lonW, lonE, latS, latN]: shear box, plus north/south boxes
    # for the meridional 2m temperature gradient.
    box = [-10, 10, 5.5, 9]
    TNORTH = [-10, 10, 11, 18]
    TSOUTH = [-10, 10, 5, 9]

    dam = xr.open_dataset(srfc)
    dam = u_darrays.flip_lat(dam)  # latitude ascending for slicing
    dam = dam['t2m']
    tsouth = dam.sel(longitude=slice(TSOUTH[0], TSOUTH[1]),
                     latitude=slice(TSOUTH[2], TSOUTH[3]))
    tnorth = dam.sel(longitude=slice(TNORTH[0], TNORTH[1]),
                     latitude=slice(TNORTH[2], TNORTH[3]))

    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(box[0], box[1]),
                latitude=slice(box[2], box[3]))  #latitude=slice(36, -37))

    u925 = da['u'].sel(level=925)  #).mean(dim='level')  #slice(850
    u600 = da['u'].sel(level=650)
    qq = da['q'].sel(level=925)

    mcs_temp = xr.open_dataset(mcs)
    mcs_temp = mcs_temp['tir']  # box-mean MCS cloud-top temperature series
    months = [3, 4, 5, 9, 10]
    mnames = {3: 'March', 4: 'April', 5: 'May', 9: 'September', 10: 'October'}

    for m in months:

        # Restrict every series to calendar month m.
        south = tsouth[(tsouth['time.month'] == m)]
        north = tnorth[(tnorth['time.month'] == m)]
        ulow = u925[(u925['time.month'] == m)]
        uhigh = u600[(u600['time.month'] == m)]
        mcs_month = mcs_temp[mcs_temp['time.month'] == m]
        qmonth = qq[(qq['time.month'] == m)]
        ##da = da.sel(longitude=slice(-18,51), latitude=slice(36, -37))

        # Annual series: zonal mean, then a latitude extremum per year.
        # The 5-year rolling means are computed but not used below.
        south_peryear = south.groupby('time.year').mean('longitude').min(
            'latitude')
        south_peryear5 = south_peryear.rolling(time=5, center=True).mean()
        north_peryear = north.groupby('time.year').mean('longitude').max(
            'latitude')
        north_peryear5 = north_peryear.rolling(time=5, center=True).mean()

        u925_peryear = ulow.groupby('time.year').mean('longitude').max(
            'latitude')  #ulow.groupby('time.year').mean()
        u925_peryear5 = u925_peryear.rolling(time=5, center=True).mean()

        u600_peryear = uhigh.groupby('time.year').mean('longitude').min(
            'latitude')  #.mean() # ('latitude').min()
        u600_peryear5 = u600_peryear.rolling(time=5, center=True).mean()

        q_peryear = qmonth.groupby('time.year').mean('longitude').max(
            'latitude')  #.mean() # ('latitude').min()
        q_peryear5 = q_peryear.rolling(time=5, center=True).mean()

        # [4::] drops the first 4 years -- presumably to align the ERA
        # record with the MCS series; TODO confirm against the MCS file.
        tgrad = ((north_peryear - south_peryear)[4::])
        shear = (u600_peryear - u925_peryear)[4::]  # -q_peryear[4::]#

        # Correlate year-to-year differences (removes the common trend).
        r = stats.pearsonr(shear.values[1::] - shear.values[0:-1],
                           mcs_month.values[1::] - mcs_month.values[0:-1])
        tshear_cor = stats.pearsonr(shear.values[1::] - shear.values[0:-1],
                                    tgrad.values[1::] - tgrad.values[0:-1])

        # Linear trend (slope, intercept) for both plotted series.
        sslope, sint = ustats.linear_trend(shear)
        mslope, mint = ustats.linear_trend(mcs_month)

        x = np.arange(0, len(shear))
        rr = r[0]  # Pearson r of the shear/MCS difference series
        f = plt.figure(figsize=(6, 3))
        ax = f.add_subplot(111)
        ax.plot(tgrad.year,
                shear,
                'x-',
                label='Zonal wind shear 600-925hPa',
                color='k')
        ax.plot(tgrad.year, sint + x * sslope, '--', color='k')  # shear trend line
        ax.set_ylim(-14, -7)
        ax1 = ax.twinx()  # second y-axis for the MCS temperature series
        ax1.plot(mcs_month['time.year'],
                 mcs_month,
                 'o-',
                 label='Mean MCS temp.',
                 color='r')
        ax1.plot(tgrad.year, mint + x * mslope, '--', color='r')  # MCS trend line
        # Proxy artists so lines from both axes share a single legend.
        mcsline = lines.Line2D([], [],
                               color='r',
                               label='Mean MCS temp.',
                               linestyle='solid',
                               marker='o')
        shearline = lines.Line2D([], [],
                                 color='k',
                                 label='Zonal wind shear 600-925hPa',
                                 linestyle='solid',
                                 marker='x',
                                 markersize=5)
        ax1.set_ylabel('degC')
        ax.set_ylabel('m s-1')
        ax.set_title(mnames[m] + ' | Corr.:' + str(np.round(rr, decimals=2)) +
                     '| Tgrad/Shear corr: ' +
                     str(np.round(tshear_cor[0], decimals=2)))
        if m == 3:
            # Legend only on the March panel.
            ax.legend(handles=[mcsline, shearline])
        f.savefig(out + 'trend_timeseries_' + str(m) + '.png')

        plt.close('all')
Ejemplo n.º 28
0
def trend_all() -> None:
    """Plot monthly ERA5 trend maps for a South American (Huaraz) domain.

    For each month in ``months``, fits per-pixel trends (via the
    module-level ``get_trend`` helper with ``method='polyfit'``) for a set
    of pressure-level diagnostics -- low-level temperature and humidity,
    200-500 hPa wind shear, 200/500 hPa winds, 200 hPa geopotential height
    and 500 hPa vertical velocity -- and renders them as a six-panel salem
    map saved as a PNG under ``figs/HUARAZ/monthly/``.

    Takes no arguments and returns nothing; all output is written to disk.
    """

    # Directory of monthly pressure-level ERA5 files.
    pl = cnst.ERA5_MONTHLY_PL_SYNOP_HU  #cnst.ERA_MONTHLY_PL_SYNOP
    #mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-70_monthly_mean_5000km2.nc'

    fpath = cnst.network_data + 'figs/HUARAZ/monthly/'

    # Analysis domain [lon_min, lon_max, lat_min, lat_max].
    box = [-82, -40, -28, 4]  #  [-18,40,0,25] #

    # Static geopotential: surface height used for the terrain masks below.
    topo = xr.open_dataset(
        '/media/ck/Elements/SouthAmerica/ERA5/monthly/ERA5_static_synop_0.7deg.nc'
    )
    topo = u_darrays.flip_lat(topo)
    z = topo['z'].isel(number=0, time=0)
    z = z.sel(longitude=slice(box[0], box[1]), latitude=slice(box[2],
                                                              box[3])).values

    da = xr.open_mfdataset(pl + '/*.nc')  #xr.open_dataset(pl)
    #da = xr.decode_cf(da)
    da = u_darrays.flip_lat(da)
    # Keep only the 12 UTC snapshot of each month within the domain.
    da = da.sel(longitude=slice(box[0], box[1]),
                latitude=slice(box[2], box[3]),
                time=(da['time.hour'] == 12)).load()
    #ipdb.set_trace()
    #da3 = xr.open_dataarray(mcs)*100
    #da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2],box[3]))

    lons = da.longitude
    lats = da.latitude

    #ipdb.set_trace()

    # Pressure levels (hPa) used for the diagnostics.
    low_press = 925
    up_press = 200
    mid_press = 500

    # Time-mean geopotential height (m) on all levels (z is geopotential,
    # divided by g = 9.81 m s-2).
    gp = da['z'].mean('time') / 9.81

    gp_high = da['z'].sel(level=up_press) / 9.81  # 200 hPa geopot. height (m)
    w_mid = da['w'].sel(level=mid_press)  # 500 hPa vertical velocity

    # Terrain masks: True where the pressure level lies above the surface.
    # NOTE(review): computed but never applied -- the corresponding
    # .where() calls below are commented out; confirm whether masking
    # is still intended.
    low_z = gp.sel(level=low_press) > z
    mid_z = gp.sel(level=mid_press) > z

    tlow = da['t'].sel(level=low_press) - 273.15  # .where(low_z)-273.15
    qlow = da['q'].sel(level=low_press) * 1000  #.where(low_z)*1000

    tmid = da['t'].sel(level=mid_press) - 273.15  # .where(mid_z)-273.15
    qmid = da['q'].sel(level=mid_press) * 1000  # .where(mid_z)*1000

    # Equivalent potential temperature at 925 and 500 hPa.
    # NOTE(review): theta_e is computed but never used afterwards.
    theta_low = u_met.theta_e(low_press, tlow, qlow)
    theta_high = u_met.theta_e(mid_press, tmid, qmid)

    theta_e = theta_low - theta_high

    # NOTE(review): despite the names, u600/v600 hold the 200 hPa wind
    # (up_press) and u800/v800 the 500 hPa wind (mid_press).
    u600 = da['u'].sel(level=up_press)  #.where(mid_z)
    v600 = da['v'].sel(level=up_press)  #.where(mid_z)

    u600.name = 'u200'

    ws600 = u_met.u_v_to_ws_wd(u600, v600)

    u800 = da['u'].sel(level=mid_press)  # 200-500 shear
    v800 = da['v'].sel(level=mid_press)

    # 200-500 hPa shear vector and its magnitude.
    shear_u = u600 - u800
    shear_v = v600 - v800

    ws_shear = u_met.u_v_to_ws_wd(shear_u.values, shear_v.values)

    # Wrap the raw speed arrays back into DataArrays carrying the
    # original coordinates (copied from u600).
    ws_600 = u600.copy(deep=True)
    ws_600.name = 'ws'

    ws_600.values = ws600[0]

    shear = u600.copy(deep=True)
    shear.name = 'shear'
    shear.values = ws_shear[0]

    # NOTE(review): keys 't600'/'q600' map to the 925 hPa fields
    # (tlow/qlow), while the panel titles below call them 500 hPa --
    # verify which level is intended.
    vars = [
        't600', 'q600', 'shear', 'q500', 'u200', 'v200', 'u500', 'v500', 'gp',
        'w500'
    ]
    data = [tlow, qlow, shear, qmid, u600, v600, u800, v800, gp_high, w_mid]

    #grid = u600.salem.grid.regrid(factor=0.5)

    #tir = grid.lookup_transform(da3)  #t2d.salem.lookup_transform(da3['tir']) #

    #grid = grid.to_dataset()
    #tir = xr.DataArray(tir, coords=[da3['time'],  grid['y'], grid['x']], dims=['time',  'latitude','longitude'])

    # Months to process; tuples would denote month ranges (see filename
    # logic below).
    months = [
        5, 6
    ]  #,2,3,4,5,6,7,8,9,10,11,12]#[3,4,5,6,9,10,11]#,4,5,6,9,10,11#,4,5,6,9,10,11,(3,5), (9,11)]#, 10,5,9]#[(12,2)]#[1,2,3,4,5,6,7,8,9,10,11,12]# #,2,3,11,12]#[(12,2)]#[1,2,3,4,5,6,7,8,9,10,11,12]# #,2,3,11,12]

    #ipdb.set_trace()
    for m in months:

        # dic maps variable name -> get_trend() output; index [0] is used
        # below as the trend field, [1] as the mean field, [2] as extra
        # per-variable diagnostics.
        dic = {}
        for v in vars:
            dic[v] = 0

        if type(m) == int:
            m = [m]

        for v, dat in zip(vars, data):
            print('Doing ', v)
            dic[v] = get_trend(dat,
                               m,
                               sig=False,
                               wilks=False,
                               method='polyfit')

        # Output filename: single month or month range.
        if len(m) == 1:
            fp = fpath + 'low_ERA5_trend_synop_HU_poly_' + str(
                m[0]).zfill(2) + '.png'
        else:
            fp = fpath + 'low_ERA5_trend_synop_HU_poly_' + str(
                m[0]).zfill(2) + '-' + str(m[1]).zfill(2) + '.png'
        #ipdb.set_trace()

        # Salem map reused by every panel; set_data/set_contour/
        # set_plot_params mutate this shared state before each visualize().
        map = shear.salem.get_map()
        # ti_da = t2d.salem.transform(ti_da)

        f = plt.figure(figsize=(15, 8), dpi=300)

        # transform their coordinates to the map reference system and plot the arrows
        xo, yo = map.grid.transform(shear.longitude.values,
                                    shear.latitude.values,
                                    crs=shear.salem.grid.proj)

        xx, yy = np.meshgrid(xo, yo)
        # Quiver only every 2nd grid point ([1::2, 1::2] stride)
        u = (dic['u200'][0]).values[1::2, 1::2]  # 200hpa
        v = (dic['v200'][0]).values[1::2, 1::2]

        # Quiver only every 2nd grid point
        uu = (dic['u200'][1]).values[1::2, 1::2]  # 200mean
        vv = (dic['v200'][1]).values[1::2, 1::2]

        u500 = (dic['u500'][0]).values[1::2, 1::2]
        v500 = (dic['v500'][0]).values[1::2, 1::2]

        # Quiver only every 2nd grid point
        uu500 = (dic['u500'][1]).values[1::2, 1::2]
        vv500 = (dic['v500'][1]).values[1::2, 1::2]

        xx = xx[1::2, 1::2]
        yy = yy[1::2, 1::2]

        # Panel 1: temperature trend (filled) + mean temperature (contours).
        ax1 = f.add_subplot(231)
        map.set_data((dic['t600'][0]).values,
                     interp='linear')  # interp='linear'

        map.set_contour(((dic['t600'][1]).values).astype(np.float64),
                        interp='linear',
                        colors='k',
                        linewidths=0.5)
        map.set_plot_params(
            levels=[-0.5, -0.4, -0.3, -0.2, 0.2, 0.3, 0.4, 0.5],
            cmap='RdBu_r',
            extend='both')  # levels=np.arange(-0.5,0.51,0.1),

        #map.set_contour((t2_mean.values).astype(np.float64), interp='linear', colors='k', linewidths=0.5, levels=np.linspace(800,925,8))
        #map.set_plot_params(levels=[-0.5,-0.4,-0.3,-0.2,-0.1,-0.05,-0.02, 0.02,0.05,0.1,0.2,0.3,0.4,0.5], cmap='RdBu_r', extend='both')  # levels=np.arange(-0.5,0.51,0.1),

        pdic = map.visualize(ax=ax1,
                             title='500hpa t trend | contours: mean t',
                             cbar_title='K decade-1')
        contours = pdic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        # Panel 2: specific humidity trend + mean q contours.
        ax2 = f.add_subplot(232)
        map.set_data((dic['q600'][0]).values,
                     interp='linear')  # interp='linear'
        map.set_contour(((dic['q600'][1]).values).astype(np.float64),
                        interp='linear',
                        colors='k',
                        linewidths=0.5)  #[6,8,10,12,14,16]
        map.set_plot_params(
            levels=[-0.2, -0.1, -0.05, -0.01, 0.01, 0.05, 0.1, 0.2],
            cmap='RdBu',
            extend='both'
        )  # levels=np.arange(-0.5,0.51,0.1), [-0.6,-0.4,-0.2,0.2,0.4,0.6]

        pdic = map.visualize(
            ax=ax2,
            title='500hPa Spec. humidity trend | contours: mean q',
            cbar_title='g kg-1 decade-1')
        contours = pdic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        # Panel 3: 200 hPa zonal wind trend + mean-wind quiver.
        ax3 = f.add_subplot(233)
        map.set_data((dic['u200'][0]).values,
                     interp='linear')  # interp='linear'
        #map.set_contour(u6_mean.values, interp='linear', colors='k')

        map.set_plot_params(
            levels=[-0.8, -0.6, -0.4, -0.2, 0.2, 0.4, 0.6, 0.8],
            cmap='RdBu_r',
            extend='both')  # levels=np.arange(-0.5,0.51,0.1)
        map.visualize(ax=ax3,
                      title='200hPa wind  trend, mean 200hPa wind',
                      cbar_title='m s-1 decade-1')
        qu = ax3.quiver(xx, yy, uu, vv, scale=70, width=0.002)

        # NOTE(review): dic['u200'][2] appears to hold, per column, three
        # latitude indices into yo (plotted as b/r/k dots) -- confirm
        # against get_trend's return contract.
        cc = ['b', 'r', 'k']
        for ii in range(3):
            use = (dic['u200'][2]).values[ii, :]
            for id, xpos in enumerate(xo):
                #ipdb.set_trace()
                ax3.plot(xpos, yo[use[id]], color=cc[ii], marker='o')

        qk = plt.quiverkey(qu,
                           0.4,
                           0.03,
                           1,
                           '1 m s$^{-1}$',
                           labelpos='E',
                           coordinates='figure')

        # Panel 4: 500 hPa zonal wind trend + mean-wind quiver.
        ax4 = f.add_subplot(234)
        map.set_data((dic['u500'][0]).values,
                     interp='linear')  # interp='linear'
        #map.set_contour(u8_mean.values, interp='linear', colors='k')

        map.set_plot_params(
            levels=[-0.8, -0.6, -0.4, -0.2, 0.2, 0.4, 0.6, 0.8],
            cmap='RdBu_r',
            extend='both')  # levels=np.arange(-0.5,0.51,0.1)
        map.visualize(
            ax=ax4,
            title='500hPa zonal wind trend, mean 500hpa wind vectors',
            cbar_title='m s-1 decade-1')
        qu = ax4.quiver(xx, yy, uu500, vv500, scale=70, width=0.002)

        qk = plt.quiverkey(qu,
                           0.4,
                           0.03,
                           1,
                           '1 m s$^{-1}$',
                           labelpos='E',
                           coordinates='figure')

        # Panel 5: 200 hPa geopotential-height trend (km scaling) + mean
        # contours, with per-element marker positions from dic['gp'][2].
        ax5 = f.add_subplot(235)
        map.set_data((dic['gp'][0]).values / 1000,
                     interp='linear')  # interp='linear'

        map.set_contour(((dic['gp'][1]).values / 1000).astype(np.float64),
                        interp='linear',
                        colors='k',
                        linewidths=0.5)
        map.set_plot_params(levels=np.linspace(-10, 10, 40),
                            cmap='RdBu_r',
                            extend='both')  # levels=np.arange(-0.5,0.51,0.1),
        cc = ['b', 'r', 'k']
        for ids, pp in enumerate((dic['gp'][2])):
            xh, yh = map.grid.transform(pp[0],
                                        pp[1],
                                        crs=shear.salem.grid.proj)
            ax5.plot(xh, yh, color=cc[ids], marker='o')
        #map.set_contour((t2_mean.values).astype(np.float64), interp='linear', colors='k', linewidths=0.5, levels=np.linspace(800,925,8))
        #map.set_plot_params(levels=[-0.5,-0.4,-0.3,-0.2,-0.1,-0.05,-0.02, 0.02,0.05,0.1,0.2,0.3,0.4,0.5], cmap='RdBu_r', extend='both')  # levels=np.arange(-0.5,0.51,0.1),

        pdic = map.visualize(
            ax=ax5,
            title='200hpa geopotential trend | contours: mean geopotential',
            cbar_title='m decade-1')
        contours = pdic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        # Panel 6: 500 hPa vertical-velocity trend (x10) + mean contours.
        ax6 = f.add_subplot(236)
        map.set_data((dic['w500'][0]).values * 10,
                     interp='linear')  # interp='linear'
        map.set_contour(((dic['w500'][1]).values * 10).astype(np.float64),
                        interp='linear',
                        colors='k',
                        linewidths=0.5)  #[6,8,10,12,14,16]
        map.set_plot_params(
            levels=[-0.2, -0.1, -0.05, -0.01, 0.01, 0.05, 0.1, 0.2],
            cmap='RdBu',
            extend='both'
        )  # levels=np.arange(-0.5,0.51,0.1), [-0.6,-0.4,-0.2,0.2,0.4,0.6]

        pdic = map.visualize(ax=ax6,
                             title='500hPa w mean | contours: mean w',
                             cbar_title='P s-1 decade-1')
        contours = pdic['contour'][0]
        plt.clabel(contours, inline=True, fontsize=7, fmt='%1.1f')

        plt.tight_layout()
        plt.savefig(fp)
        plt.close('all')
# --- Ejemplo n.º 29 (Example No. 29 — listing separator; score: 0) ---
def trend_all() -> None:
    """Compute Mann-Kendall trend maps of monthly ERA fields and MCS
    frequency over West Africa and plot them as four-panel figures.

    For each month in ``months`` the function derives 925 hPa temperature,
    650 hPa specific humidity, a 650-700 hPa zonal wind minus 925 hPa wind
    shear and the -70C cloud ('tir') frequency, coarsens them onto a
    half-resolution grid, fits a per-pixel Mann-Kendall trend (slopes
    masked at p > 0.05), plots decadal trends and pickles the MCS
    trend/mean dictionaries.

    Takes no arguments and returns nothing; output is PNG figures and two
    pickle files written to disk.
    """

    srfc = cnst.ERA_MONTHLY_SRFC
    pl = cnst.ERA_MONTHLY_PL
    mcs = cnst.GRIDSAT + 'aggs/gridsat_WA_-70_monthly_count.nc'

    fpath = cnst.network_data + 'figs/CLOVER/months/'

    # Analysis domain [lon_min, lon_max, lat_min, lat_max].
    box = [-18, 40, 0, 25]
    #box=[-18,55,-45,45]
    #box = [-8, -4, 5.5, 7.5]

    da = xr.open_dataset(pl)
    da = u_darrays.flip_lat(da)
    da = da.sel(longitude=slice(box[0], box[1]),
                latitude=slice(box[2], box[3]))  #latitude=slice(36, -37))
    da2 = xr.open_dataset(srfc)
    da2 = u_darrays.flip_lat(da2)
    da2 = da2.sel(longitude=slice(box[0], box[1]),
                  latitude=slice(box[2], box[3]))  #latitude=slice(36, -37))
    da3 = xr.open_dataset(mcs)
    da3 = da3.sel(lon=slice(box[0], box[1]), lat=slice(box[2], box[3]))

    lons = da.longitude
    lats = da.latitude

    #q = da['q'].sel(level=925)
    q = da['q'].sel(level=650)
    #q = da2['tcwv']
    t2d = da['t'].sel(level=925)

    # NOTE(review): reads the meridional wind 'v' although the variable is
    # named u925; the output filename contains 'vsurface', which suggests
    # this is deliberate -- confirm before "fixing".
    u925 = da['v'].sel(level=925)  #).mean(dim='level')  #slice(850
    u600 = da['u'].sel(level=slice(650, 700)).mean(dim='level')

    shear = u600 - u925

    q.values = q.values * 1000  # kg/kg -> g/kg

    # Coarsen everything onto a half-resolution grid via salem lookup
    # transforms so all fields share one grid for trend fitting.
    grid = t2d.salem.grid.regrid(factor=0.5)
    t2 = grid.lookup_transform(t2d)
    tir = grid.lookup_transform(da3['tir'])
    q = grid.lookup_transform(q)

    shear = grid.lookup_transform(shear)

    # tir = t2d.salem.lookup_transform(da3['tir'])
    #
    # t2 = t2d
    # #tir = da3['tir']
    # q = q
    # shear = shear

    grid = grid.to_dataset()

    # Rewrap the raw arrays as DataArrays on the coarse grid coordinates.
    t2 = xr.DataArray(t2,
                      coords=[t2d['time'], grid['y'], grid['x']],
                      dims=['time', 'latitude', 'longitude'])
    q = xr.DataArray(q,
                     coords=[t2d['time'], grid['y'], grid['x']],
                     dims=['time', 'latitude', 'longitude'])
    tir = xr.DataArray(tir,
                       coords=[da3['time'], grid['y'], grid['x']],
                       dims=['time', 'latitude', 'longitude'])
    shear = xr.DataArray(shear,
                         coords=[t2d['time'], grid['y'], grid['x']],
                         dims=['time', 'latitude', 'longitude'])

    #months = np.arange(1,13)
    months = [3, 10]

    dicm = {}  # month -> MCS-frequency trend (% of mean per decade)
    dicmean = {}  # month -> MCS-frequency climatological mean

    def array_juggling(data, month, hour=None):
        """Return (decadal MK-trend map, yearly means) for one month.

        Selects the given month (optionally a specific hour), averages to
        yearly values, fits a per-pixel Mann-Kendall linear trend and
        masks slopes whose p-value exceeds 0.05.
        """

        m = month

        if hour is not None:  # fixed: was 'hour != None' (PEP 8)
            data = data[(data['time.month'] == m)
                        & (data['time.hour'] == hour)]
        else:
            data = data[(data['time.month'] == m)]
        data_years = data.groupby('time.year').mean(axis=0)

        # stack lat and lon into a single dimension called allpoints
        datastacked = data_years.stack(allpoints=['latitude', 'longitude'])

        # apply the function over allpoints to calculate the trend at each point
        print('Entering trend calc')
        dtrend = datastacked.groupby('allpoints').apply(
            u_darrays.linear_trend_mk, alpha=0.05, eps=0.0001)

        ddtrend = dtrend['slope']
        pthresh = 0.05
        # try:
        #     pthresh = us.fdr_threshold(dtrend['pval'].values[np.isfinite(dtrend['pval'].values)], alpha=0.05)
        #     ddtrend.values[(dtrend['pval'].values > pthresh) | np.isnan(dtrend['pval'].values)] = np.nan
        # except ValueError:
        #     ddtrend.values = ddtrend.values*np.nan
        #     pthresh = np.nan
        # Mask non-significant (or undefined) slopes.
        ddtrend.values[(dtrend['pval'].values > pthresh)
                       | np.isnan(dtrend['pval'].values)] = np.nan
        print('p value threshold', pthresh)

        # unstack back to lat lon coordinates
        return ddtrend.unstack('allpoints'), data_years

    for m in months:

        tirtrend, dyears = array_juggling(tir, m)
        tirm_mean = dyears.mean(axis=0)

        t2trend, dummy = array_juggling(t2, m)  #, hour=12

        qtrend, dummy = array_juggling(q, m)  #, hour=12

        sheartrend, dummy = array_juggling(shear, m)  #, hour=12

        # Scale per-year slopes to per-decade changes; MCS frequency is
        # additionally expressed as percent of its climatological mean.
        t2trend_unstacked = t2trend * 10.  # warming over decade
        qtrend_unstacked = qtrend * 10.  # warming over decade
        sheartrend_unstacked = sheartrend * 10.  # warming over decade
        tirtrend_unstacked = (
            (tirtrend.values) * 10. / tirm_mean.values) * 100.

        dicm[m] = tirtrend_unstacked
        dicmean[m] = tirm_mean

        t_da = t2trend_unstacked  #xr.DataArray(t2trend_unstacked, coords=[lats, lons], dims=['latitude', 'longitude'])
        q_da = qtrend_unstacked  #xr.DataArray(qtrend_unstacked, coords=[lats, lons], dims=['latitude', 'longitude'])
        s_da = sheartrend_unstacked  #xr.DataArray(sheartrend_unstacked, coords=[lats, lons], dims=['latitude', 'longitude'])
        ti_da = tirtrend_unstacked  #xr.DataArray(tirtrend_unstacked, coords=[lats, lons], dims=['latitude', 'longitude'])

        fp = fpath + 'ttrend_mktrend_-70C_coarse_vsurface' + str(m).zfill(
            2) + '.png'  #'ttrend_synop_-70C_coarse_'+str(m).zfill(2)+'.png'
        # 'smap' rather than 'map' to avoid shadowing the builtin.
        smap = shear.salem.get_map()

        f = plt.figure(figsize=(8, 5), dpi=300)
        ax1 = f.add_subplot(221)

        # smap.set_shapefile(rivers=True)
        smap.set_plot_params()

        smap.set_data(t_da)  # interp='linear'
        smap.set_plot_params(levels=np.arange(-0.5, 0.51, 0.1),
                             cmap='RdBu_r',
                             extend='both')
        smap.visualize(ax=ax1, title='t2')

        ax2 = f.add_subplot(222)
        smap.set_data(q_da)  # interp='linear'
        smap.set_plot_params(levels=np.arange(-0.5, 0.51, 0.1),
                             cmap='RdBu',
                             extend='both')
        smap.visualize(ax=ax2, title='q')

        ax3 = f.add_subplot(223)
        smap.set_data(s_da)  # interp='linear'
        smap.set_plot_params(levels=np.arange(-1, 1.1, 0.2),
                             cmap='RdBu',
                             extend='both')
        smap.visualize(ax=ax3, title='u-shear')

        ax4 = f.add_subplot(224)
        smap.set_data(ti_da)  # interp='linear'
        smap.set_plot_params(cmap='Blues',
                             extend='both',
                             levels=np.arange(20, 101,
                                              20))  #levels=np.arange(20,101,20)
        smap.visualize(ax=ax4, title='-70C frequency')

        plt.savefig(fp)
        plt.close('all')

    # Persist the MCS-frequency trend and mean for reuse by other scripts.
    pkl.dump(dicm,
             open(cnst.network_data + 'data/CLOVER/saves/storm_frac.p', 'wb'))
    pkl.dump(
        dicmean,
        open(cnst.network_data + 'data/CLOVER/saves/storm_frac_mean.p', 'wb'))