Ejemplo n.º 1
0
def loop(y):
    """Build one yearly netCDF of -40C cloud blobs (>= ~1000 km2) for 15-21 UTC.

    Reads all GRIDSAT-AFRICA files of year *y*, keeps only 15-21 UTC slots,
    converts brightness temperature to degC, masks everything outside defined
    blobs, and writes the concatenated result as int16 (degC * 100).

    Parameters
    ----------
    y : int
        Year to process.

    Returns early if the output file already exists.
    """
    out = cnst.local_data + 'GRIDSAT/MCS18/'
    infolder = cnst.local_data + 'GRIDSAT/www.ncei.noaa.gov/data/geostationary-ir-channel-brightness-temperature-gridsat-b1/access/'
    filename = 'gridsat_WA_-40_1000km2_15-21UTC' + str(y) + '.nc'
    da = None
    if os.path.isfile(out + filename):
        return

    files = glob.glob(infolder + str(y) + '/GRIDSAT-AFRICA_CP*.nc')
    files.sort()
    for f in files:
        print('Doing ' + f)

        df = xr.open_dataset(f)

        # Keep only 15-21 UTC slots.
        # NOTE(review): assumes one timestep per file, otherwise this boolean
        # DataArray is ambiguous in an `if` - confirm against the input data.
        if (df['time.hour'] < 15) | (df['time.hour'] > 21):
            continue

        # Dataset.rename no longer supports inplace=True in modern xarray;
        # rebind the returned dataset instead.
        df = df.rename({'irwin_cdr': 'tir'})
        df['tir'].values = df['tir'].values - 273.15  # K -> degC
        labels, goodinds = ua.blob_define(df['tir'].values, -40, minmax_area=[16, 25000],
                                          max_area=None)  # 7.7x7.7km = 64km2 per pix in gridsat?
        df['tir'].values[labels == 0] = 0
        df['tir'].values[df['tir'].values < -110] = 0
        # Store as int16 (degC * 100) to keep the files small.
        df['tir'].values = (np.round(df['tir'].values, decimals=2) * 100).astype(np.int16)
        # Explicit None check instead of relying on concat raising TypeError.
        if da is None:
            da = df.copy()
        else:
            da = xr.concat([da, df], dim='time')

    if da is None:  # nothing in the 15-21 UTC window for this year
        print('No valid slots for', y)
        return

    enc = {'tir': {'complevel': 5, 'shuffle': True, 'zlib': True}}
    da.to_netcdf(out + filename, encoding=enc)
    da.close()
Ejemplo n.º 2
0
def file_loop(f):
    """Collect per-storm statistics from one OLR file (18 UTC slots only).

    Parameters
    ----------
    f : str
        Path to a netCDF file with an ``lw_out_PBLtop`` variable.

    Returns
    -------
    dict or None
        Mapping ``'<date>_<blob-id>' -> stats dict`` per detected storm, or
        None when the file's month is skipped.
    """
    print('Doing ' + f)

    ydic = {}

    df = xr.open_dataset(f)
    df = df.sel(latitude=slice(-4, 13), longitude=slice(-18, 20))
    # NOTE(review): this skips months 7 and 8 only (strict < 9 and > 6) -
    # confirm that excluding just Jul/Aug is intended.
    if (int(df['time.month'][0]) < 9) & (int(df['time.month'][0]) > 6):
         print('return')
         return

    df = df['lw_out_PBLtop'][df['time.hour'] == 18]

    for d in df:

        print('Read data')

        # Convert OLR to brightness temperature (degC).
        d.values = u_met.olr_to_bt(d.values)
        labels, goodinds = ua.blob_define(d.values, -50, minmax_area=[258, 40000], max_area=None) # 4.4*4.4km: 258 pixel = 5000km2, 40000 pixel = 350000km2
        for g in goodinds:

            if g == 0:  # background label
                continue

            pos = np.where(labels == g)

            dic = dictionary()

            ts = pd.to_datetime(d['time'].values)
            date = ts.strftime('%Y-%m-%d_%H:%M:%S')

            dic['date'] = ts
            dic['month'] = int(d['time.month'])
            dic['year'] = int(d['time.year'])

            storm = d.values[pos]

            dic['area'] = storm.size
            dic['70area'] = np.sum(storm <= -70)
            dic['minlon'] = np.min(d.longitude.values[pos[1]])
            dic['minlat'] = np.min(d.latitude.values[pos[0]])
            dic['maxlon'] = np.max(d.longitude.values[pos[1]])
            dic['maxlat'] = np.max(d.latitude.values[pos[0]])
            dic['clon'] = dic['minlon'] + (dic['maxlon'] - dic['minlon']) / 2
            dic['clat'] = dic['minlat'] + (dic['maxlat'] - dic['minlat']) / 2
            dic['tmin'] = np.min(storm)
            dic['tmean'] = np.mean(storm)
            dic['t10'] = np.percentile(storm, 10)
            # Bug fix: t90 previously computed the 10th percentile again.
            dic['t90'] = np.percentile(storm, 90)

            ydic[date + '_' + str(g)] = dic

    return ydic
Ejemplo n.º 3
0
def file_loop(f):
    """Collect per-storm statistics from one GRIDSAT TIR file.

    Parameters
    ----------
    f : str
        Path to a netCDF file with a ``tir`` variable on lat/lon coordinates.

    Returns
    -------
    dict
        Mapping ``'<date>_<blob-id>' -> stats dict`` per detected -50C storm.
    """
    print('Doing ' + f)

    ydic = {}

    df = xr.open_dataset(f)
    df = df.sel(lat=slice(-4, 25), lon=slice(-18, 20))

    for d in df['tir']:

        labels, goodinds = ua.blob_define(d.values, -50, minmax_area=[83, 25000], max_area=None) # 7.7x7.7km = 64km2 per pix in gridsat? 83 pix is 5000km2

        for g in goodinds:

            if g == 0:  # background label
                continue

            pos = np.where(labels == g)
            dic = dictionary()

            ts = pd.to_datetime(d['time'].values)
            date = ts.strftime('%Y-%m-%d_%H:%M:%S')

            dic['date'] = ts
            dic['month'] = int(d['time.month'])
            dic['year'] = int(d['time.year'])

            storm = d.values[pos]

            dic['area'] = storm.size
            dic['70area'] = np.sum(storm <= -70)
            dic['minlon'] = np.min(d.lon.values[pos[1]])
            dic['minlat'] = np.min(d.lat.values[pos[0]])
            dic['maxlon'] = np.max(d.lon.values[pos[1]])
            dic['maxlat'] = np.max(d.lat.values[pos[0]])
            dic['clon'] = dic['minlon'] + (dic['maxlon'] - dic['minlon']) / 2
            dic['clat'] = dic['minlat'] + (dic['maxlat'] - dic['minlat']) / 2
            dic['tmin'] = np.min(storm)
            dic['tmean'] = np.mean(storm)
            dic['t10'] = np.percentile(storm, 10)
            # Bug fix: t90 previously computed the 10th percentile again.
            dic['t90'] = np.percentile(storm, 90)

            ydic[date + '_' + str(g)] = dic

    return ydic
Ejemplo n.º 4
0
def saveYearly():
    """Build one netCDF per year (1983-2017) of -70C blobs at 18 UTC.

    Skips years whose output file already exists; masks everything outside
    detected blobs and below -110C before saving with zlib compression.
    """
    out = '/users/global/cornkle/mymachine/GRIDSAT/MCS18/'
    infolder = '/users/global/cornkle/mymachine/GRIDSAT/www.ncei.noaa.gov/data/geostationary-ir-channel-brightness-temperature-gridsat-b1/access/'

    years = np.arange(1983, 2018)  # list(next(os.walk(msg_folder))[1])

    for y in years:
        filename = 'gridsat_WA_' + str(y) + '.nc'
        da = None
        if os.path.isfile(out + filename):
            continue

        files = glob.glob(infolder + str(y) + '/GRIDSAT-AFRICA_CP*.nc')
        files.sort()
        for f in files:
            print('Doing ' + f)

            df = xr.open_dataset(f)
            # NOTE(review): assumes one timestep per file; a multi-timestep
            # 'time.hour' would be ambiguous here - confirm.
            if df['time.hour'] != 18:
                continue

            # Dataset.rename no longer supports inplace=True in modern xarray;
            # rebind the returned dataset instead.
            df = df.rename({'irwin_cdr': 'tir'})
            df['tir'].values = df['tir'].values - 273.15  # K -> degC
            labels, goodinds = ua.blob_define(
                df['tir'].values, -70, minmax_area=[83, 25000], max_area=None
            )  # 7.7x7.7km = 64km2 per pix in gridsat? 83 pix is 5000km2
            df['tir'].values[labels == 0] = 0
            df['tir'].values[df['tir'].values < -110] = 0
            # Explicit None check instead of relying on concat raising TypeError.
            if da is None:
                da = df.copy()
            else:
                da = xr.concat([da, df], dim='time')

        if da is None:  # no 18 UTC file found for this year
            print('No valid slots for', y)
            continue

        enc = {'tir': {'complevel': 5, 'shuffle': True, 'zlib': True}}
        da.to_netcdf(out + filename, encoding=enc)
        da.close()
Ejemplo n.º 5
0
def loop(y):
    """Build one yearly netCDF of -70C cloud blobs (>= ~5000 km2) for 15-21 UTC.

    Same pipeline as the -40C variant: read all GRIDSAT-AFRICA files of year
    *y*, keep 15-21 UTC slots, convert to degC, mask non-blob pixels, and save
    the concatenation as int16 (degC * 100).

    Parameters
    ----------
    y : int
        Year to process.

    Returns early if the output file already exists.
    """
    out = cnst.local_data + 'GRIDSAT/MCS18/'
    infolder = cnst.local_data + 'GRIDSAT/www.ncei.noaa.gov/data/geostationary-ir-channel-brightness-temperature-gridsat-b1/access/'
    filename = 'gridsat_WA_-70_5000km2_15-21UTC' + str(y) + '.nc'
    da = None
    if os.path.isfile(out + filename):
        return

    files = glob.glob(infolder + str(y) + '/GRIDSAT-AFRICA_CP*.nc')
    files.sort()
    for f in files:
        print('Doing ' + f)

        df = xr.open_dataset(f)

        # Keep only 15-21 UTC slots.
        # NOTE(review): assumes one timestep per file - confirm.
        if (df['time.hour'] < 15) | (df['time.hour'] > 21):
            continue

        # Dataset.rename no longer supports inplace=True in modern xarray;
        # rebind the returned dataset instead.
        df = df.rename({'irwin_cdr': 'tir'})
        df['tir'].values = df['tir'].values - 273.15  # K -> degC
        labels, goodinds = ua.blob_define(
            df['tir'].values, -70, minmax_area=[83, 25000],
            max_area=None)  # 7.7x7.7km = 64km2 per pix in gridsat?
        df['tir'].values[labels == 0] = 0
        df['tir'].values[df['tir'].values < -110] = 0
        # Store as int16 (degC * 100) to keep the files small.
        df['tir'].values = (np.round(df['tir'].values, decimals=2) *
                            100).astype(np.int16)
        # Explicit None check instead of relying on concat raising TypeError.
        if da is None:
            da = df.copy()
        else:
            da = xr.concat([da, df], dim='time')

    if da is None:  # nothing in the 15-21 UTC window for this year
        print('No valid slots for', y)
        return

    enc = {'tir': {'complevel': 5, 'shuffle': True, 'zlib': True}}
    da.to_netcdf(out + filename, encoding=enc)
    da.close()
Ejemplo n.º 6
0
def saveMCS_WA15(year):
    """Pair IMERG HQ precip files with the closest MSG TIR slot and save monthly files.

    For each month Mar-Nov of *year*, every IMERG file is matched with the MSG
    image nearest in time (within 15 min, trying the second-closest slot if the
    first is missing), MSG temperature is interpolated onto the IMERG grid,
    -40C blobs are defined, and one netCDF per month with ``rain`` / ``mcs`` /
    ``tir`` fields is written.

    Parameters
    ----------
    year : int
        Year to process.
    """
    trmm_folder = cnst.network_data + 'data/OBS/IMERG_HQ_precip_old'
    msg_folder = cnst.network_data + 'data/OBS/MSG_WA30'  #meteosat_WA30'
    msg_folder2 = cnst.network_data + 'data/OBS/MSG_MAMON'

    mJJAS = msg.ReadMsg(msg_folder)
    mMAMON = msg.ReadMsg(msg_folder2)
    cnt = 0
    _y = year

    for ids, _m in enumerate(range(3, 12)):

        # Bug fix: ds was never initialized, so the first xr.concat raised
        # UnboundLocalError (not caught by `except TypeError`). Collect per
        # month, matching the per-month output filename below.
        ds = None

        files = glob.glob(
            trmm_folder + '/' + str(_y) + '/' + str(_m).zfill(2) + '/*.nc4'
        )  # area=[-12, 12, 4, 9])   # [-15, 15, 4, 21], [-10, 10, 10, 20]

        for tf in files:

            t = xr.open_dataset(tf)

            _h = t['time.hour'].values[0]
            _d = t['time.day'].values[0]
            _mi = t['time.minute'].values[0]

            if (_m < 3) | (_m > 11):
                print('Wrong month')
                continue

            da = t['HQprecipitation'].squeeze()
            da = da.T
            tdic = da.sel(lat=slice(4.3, 9), lon=slice(-14,
                                                       14))  #[-12, 15, 5, 25]

            # Skip essentially rain-free scenes.
            if np.sum(tdic.values) <= 0.01:
                continue

            # MAM/ON and JJAS are served by different MSG archives.
            if _m in [3, 4, 5, 10, 11]:
                m = mMAMON
            else:
                m = mJJAS

            date = dt.datetime(_y, _m, _d, _h, _mi)
            arr = np.array([15, 30, 45, 60, 0])

            # get closest minute
            dm = arr - _mi
            if (dm < 0).any():
                dm = dm[dm < 0]

            try:
                ind = (np.abs(dm)).argmin()
            except ValueError:
                continue

            # set zero shift time for msg
            dt0 = dm[ind]
            ndate = date + dt.timedelta(minutes=int(dt0))
            m.set_date(ndate.year, ndate.month, ndate.day, ndate.hour,
                       ndate.minute)

            mdic = m.get_data(llbox=[
                tdic['lon'].values.min(), tdic['lon'].values.max(),
                tdic['lat'].values.min(), tdic['lat'].values.max()
            ])

            # check whether date is completely missing or just 30mins interval exists
            if not mdic:
                # Drop the failed slot and try the next-closest one (<= 15 min).
                dm = np.delete(dm, np.argmin(np.abs(dm)), axis=0)
                try:
                    dummy = np.min(np.abs(dm)) > 15
                except ValueError:
                    continue
                if dummy:
                    print('Date missing')
                    continue
                ind = (np.abs(dm)).argmin()
                dt0 = dm[ind]
                ndate = date + dt.timedelta(minutes=int(dt0))
                m.set_date(ndate.year, ndate.month, ndate.day, ndate.hour,
                           ndate.minute)
                mdic = m.get_data(llbox=[
                    tdic['lon'].values.min(), tdic['lon'].values.max(),
                    tdic['lat'].values.min(), tdic['lat'].values.max()
                ])

                if not mdic:
                    print('Date missing')
                    continue

            print('TRMM:', date, 'MSG:', ndate.year, ndate.month, ndate.day,
                  ndate.hour, ndate.minute)

            lon1 = mdic['lon'].values
            lat1 = mdic['lat'].values

            # NOTE(review): weights are computed during the first month only and
            # reused afterwards - assumes the grids never change; if the first
            # month has no files, `inds` stays undefined. Confirm upstream.
            if ids == 0:

                inds, weights, shape = u_int.interpolation_weights_grid(
                    lon1, lat1, t.salem.grid)

            orig = mdic['t'].values

            try:
                outorig = u_int.interpolate_data(orig, inds, weights, shape)
            except IndexError:
                # Bug fix: previously fell through and used a stale/undefined
                # `outorig` below.
                print('Interpolation problem, continue')
                continue

            outt = outorig.copy()

            labels, goodinds = ua.blob_define(
                outt, -40, minmax_area=[556, 40000],
                max_area=None)  # 7.7x7.7km = 64km2 per pix in gridsat?
            outt[labels == 0] = 0
            outt[outt < -115] = 0
            outt = (np.round(outt, decimals=2) * 100).astype(np.int16)

            outorig = (np.round(outorig, decimals=2) * 100).astype(np.int16)

            df = xr.Dataset(
                {
                    'rain': (['lon', 'lat'], da.values),
                    'mcs': (['lon', 'lat'], outt),
                    'tir': (['lon', 'lat'], outorig),
                },
                coords={
                    'lon': da.lon,
                    'lat': da.lat,
                    'time': date
                })

            if ds is None:
                ds = df.copy()
            else:
                ds = xr.concat([ds, df], dim='time')

        if ds is None:  # no matched scene this month - nothing to write
            print('No data for', _y, _m)
            continue

        savefile = cnst.network_data + 'MCSfiles/TIR_on_GPM/GPM_MCS_' + str(
            _y) + '-' + str(_m).zfill(2) + '.nc'
        try:
            os.remove(savefile)
        except OSError:
            print('OSError, no dir?')

        ds.to_netcdf(path=savefile, mode='w')
        print('Saved ' + savefile)
        ds.close()
Ejemplo n.º 7
0
def file_loop(f):
    """Extract storm statistics from one MCS file and attach ERA environmental values.

    Finds the matching ERA-daily (12 UTC, pressure levels) day, locates the
    storm's minimum-temperature pixel (preferring the rainiest one on ties),
    and samples 925/650 hPa plus layer-mean environmental variables there.

    Parameters
    ----------
    f : str
        Path to a netCDF file with ``tc_lag0`` (cloud-top temperature) and
        ``p`` (precipitation) variables.

    Returns
    -------
    dict or None
        Populated stats dict, or None when ERA is missing, no -70C blob
        exists, too few valid pixels, or interpolation fails.
    """
    print('Doing file: ' + f)
    dic = xr.open_dataset(f)
    era = xr.open_dataset(cnst.ERA_DAILY_PL12UTC)

    getera = np.where((era['time.day'] == dic['time.day'])
                      & (era['time.month'] == dic['time.month'])
                      & (era['time.year'] == dic['time.year']))
    try:
        era_day = era.isel(time=int(getera[0]))
    except TypeError:
        print('Era missing')
        return

    out = dictionary()
    outt = dic['tc_lag0'].values

    # Require at least one contiguous -70C area of >= 200 pixels.
    test70 = outt.copy()
    labels, goodinds = ua.blob_define(test70, -70, min_area=200)

    if np.sum(goodinds) == 0:
        return

    outp = dic['p'].values

    tminpos = np.where(dic['tc_lag0'].values == np.nanmin(
        dic['tc_lag0'].values))  # era position close to min temp
    if len(tminpos[0]) > 1:
        # Several equally-cold pixels: prefer the one with maximum rain,
        # otherwise just take the first.
        ptmax = np.nanmax((dic['p'].values)[tminpos])
        if ptmax > 0:
            prpos = np.where((dic['p'].values)[tminpos] == ptmax)
            tminpos = ((tminpos[0])[prpos], (tminpos[1])[prpos])
        else:
            tminpos = ((tminpos[0])[0], (tminpos[1])[0])

    elon = dic['lon'].values[tminpos]
    elat = dic['lat'].values[tminpos]

    era_day = era_day.sel(latitude=elat, longitude=elon, method='nearest')

    e925 = era_day.sel(level=925).mean()
    elow = era_day.sel(level=slice(925, 850)).mean('level').mean()
    e650 = era_day.sel(level=650).mean()
    emid = era_day.sel(level=slice(600, 700)).mean('level').mean()

    out['lon'] = dic['lon'].values
    out['lat'] = dic['lat'].values
    out['hour'] = dic['time.hour'].item()
    out['month'] = dic['time.month'].item()
    out['year'] = dic['time.year'].item()
    out['date'] = dic['time'].values

    t_thresh = -50  # -40C ~ 167 W m-2
    mask = np.isfinite(outp) & (outt <= t_thresh) & np.isfinite(outt)
    mask_area = (outt <= t_thresh) & np.isfinite(outt)

    if np.sum(mask) < 3:
        return

    out['clat'] = np.min(out['lat']) + (
        (np.max(out['lat']) - np.min(out['lat'])) * 0.5)
    out['clon'] = np.min(out['lon']) + (
        (np.max(out['lon']) - np.min(out['lon'])) * 0.5)

    isfin = np.sum((np.isfinite(outp)) & ((outt <= t_thresh)))

    if isfin < 3:
        return

    print(
        np.nanmax(outt[mask])
    )  # can be bigger than cutout threshold because of interpolation to 5km grid after cutout

    out['area'] = np.sum(mask_area)

    out['tmin'] = np.min(outt[mask])
    out['tmean'] = np.mean(outt[mask])
    out['pmax'] = np.max(outp[mask])
    out['pmean'] = np.mean(outp[mask])
    try:
        out['q925'] = float(e925['q'])
    except TypeError:
        return

    out['q650'] = float(e650['q'])
    out['v925'] = float(e925['v'])
    # Bug fix: v650 was previously read from the 925 hPa level.
    out['v650'] = float(e650['v'])
    out['u925'] = float(e925['u'])
    out['u650'] = float(e650['u'])
    out['w925'] = float(e925['w'])
    out['w650'] = float(e650['w'])
    out['rh925'] = float(e925['r'])
    out['rh650'] = float(e650['r'])
    out['t925'] = float(e925['t'])
    out['t650'] = float(e650['t'])
    out['pv925'] = float(e925['pv'])
    out['pv650'] = float(e650['pv'])
    out['div925'] = float(e925['d'])
    out['div650'] = float(e650['d'])
    out['q_low'] = float(elow['q'])
    out['q_mid'] = float(emid['q'])

    out['shear'] = float(e650['u'] - e925['u'])

    out['pgt30'] = np.sum(outp[mask] > 30)
    out['isvalid'] = np.sum(mask)
    out['pgt01'] = np.sum(outp[mask] > 0.1)

    out['p'] = outp[mask]
    out['t'] = outt[mask]

    dic.close()
    era.close()

    return out
Ejemplo n.º 8
0
def storm_count_hov():
    """Per-latitude-row average storm counts at five temperature thresholds.

    For every year 1983-2017 and every month, counts blobs colder than
    -40/-50/-60/-70/-75C of at least ~5000 km2 at 18 UTC over 10W-12E,
    5.2-8N, averaged over the month's timesteps, and pickles one dict
    (month -> list of per-year arrays) per threshold.
    """
    msg_folder = cnst.GRIDSAT
    fname = msg_folder + 'gridsat_WA_-40_1000km2_15-21UTC'

    def makedic():
        # One empty list per calendar month.
        mdic = {}
        for m in range(1, 13):
            mdic[m] = []
        return mdic

    def count_rows(da, thresh, pixel, label=None):
        # Mean number of blobs per row of the first spatial axis, averaged
        # over timesteps. `label` enables the per-step progress print that
        # the original code emitted for the -40C threshold only.
        val = 0
        storm = np.array([0] * da.shape[1])
        for d in da:
            cut = d.sel(lat=slice(5.2, 8), lon=slice(-10, 12))
            labels, goodinds = ua.blob_define(cut.values, thresh, minmax_area=[pixel, 25000],
                                              max_area=None)  # 7.7x7.7km = 64km2 per pix in gridsat?
            # Distinct labels per row, minus the background label.
            unarr = np.array([np.unique(ll).size for ll in labels])
            storm += unarr - 1
            val += 1
            if label is not None:
                print(label, storm)
        return storm / val

    dic75 = makedic()
    dic70 = makedic()
    dic60 = makedic()
    dic50 = makedic()
    dic40 = makedic()

    pixel = 78  # 78 = 5000km2 # 15000 = 253

    for y in range(1983, 2018):  # 2018
        ds = xr.open_dataset(fname + str(y) + '.nc')
        for m in range(1, 13):

            da = ds['tir'][(ds['time.month'] == m) & (ds['time.hour'] == 18)]  # (ds['time.hour']>=15) & (ds['time.hour']<=21)]
            da.values = da.values / 100  # stored as degC * 100
            da = da.sel(lat=slice(5.2, 8), lon=slice(-10, 12))

            dic40[m].append(count_rows(da, -40, pixel, label='-40 storm'))
            dic50[m].append(count_rows(da, -50, pixel))
            dic60[m].append(count_rows(da, -60, pixel))
            dic70[m].append(count_rows(da, -70, pixel))
            dic75[m].append(count_rows(da, -75, pixel))

    print(40, dic40[3])
    print(50, dic50[3])
    print(60, dic60[3])
    print(70, dic70[3])
    print(75, dic75[3])

    pkl.dump(dic40, open(cnst.network_data + 'data/CLOVER/saves/storm_HOVcount_10W-12E_5-8N_-40C_5000km2_18.p', #4f5-8f5N
                        'wb'))

    pkl.dump(dic50, open(cnst.network_data + 'data/CLOVER/saves/storm_HOVcount_10W-12E_5-8N_-50C_5000km2_18.p', #4f5-8f5N
                        'wb'))

    pkl.dump(dic60, open(cnst.network_data + 'data/CLOVER/saves/storm_HOVcount_10W-12E_5-8N_-60C_5000km2_18.p',
                         'wb'))

    pkl.dump(dic70, open(cnst.network_data + 'data/CLOVER/saves/storm_HOVcount_10W-12E_5-8N_-70C_5000km2_18.p',
                         'wb'))

    pkl.dump(dic75, open(cnst.network_data + 'data/CLOVER/saves/storm_HOVcount_10W-12E_5-8N_-75C_5000km2_18.p',
                        'wb'))
Ejemplo n.º 9
0
def storm_count(area=False):
    """Average monthly storm counts (and optionally 90th-percentile areas).

    For every year 1983-2017 and every month, counts blobs colder than
    -40/-50/-60/-70/-75C of at least ~5000 km2 at 18 UTC over 10W-12E,
    5.2-8N, averaged over the month's timesteps, and pickles one dict per
    threshold (plus per-threshold area dicts when ``area`` is True).

    Parameters
    ----------
    area : bool, optional
        Also collect blob pixel areas and store their 90th percentile.
    """
    msg_folder = cnst.GRIDSAT
    fname = msg_folder + 'gridsat_WA_-40_1000km2_15-21UTC'

    def makedic():
        # One empty list per calendar month.
        mdic = {}
        for m in range(1, 13):
            mdic[m] = []
        return mdic

    def count_storms(da, thresh, pixel, debug_tag=None):
        # Mean blob count per timestep plus the list of blob areas (pixels).
        # `debug_tag` reproduces the per-step print the original code emitted
        # for the -60C threshold only.
        val = 0
        storm = 0.0
        ar = []
        for d in da:
            cut = d.sel(lat=slice(5.2, 8), lon=slice(-10, 12))  # 4.5,8.5
            labels, goodinds = ua.blob_define(cut.values, thresh, minmax_area=[pixel, 25000],
                                              max_area=None)  # 7.7x7.7km = 64km2 per pix in gridsat?
            if area:
                for gi in goodinds:
                    ar.append(np.sum(labels == gi))

            # np.float was removed in NumPy >= 1.20; the builtin is equivalent.
            storm += float(goodinds.size)
            if debug_tag is not None:
                print(debug_tag, goodinds, val)
            val += 1
        return storm / val, ar

    def append_area(adic, m, ar):
        # 90th percentile of blob areas; NaN when no blob was found.
        try:
            adic[m].append(np.percentile(ar, 90))
        except IndexError:
            adic[m].append(np.nan)

    dic75 = makedic()
    dic70 = makedic()
    dic60 = makedic()
    dic50 = makedic()
    dic40 = makedic()

    area75 = makedic()
    area70 = makedic()
    area60 = makedic()
    area50 = makedic()
    area40 = makedic()

    pixel = 78  # 78 = 5000km2 # 15000 = 253

    for y in range(1983, 2018):  # 2018
        ds = xr.open_dataset(fname + str(y) + '.nc')
        for m in range(1, 13):

            da = ds['tir'][(ds['time.month'] == m) & (ds['time.hour'] == 18)]  # (ds['time.hour']>=15) & (ds['time.hour']<=21)]
            da.values = da.values / 100  # stored as degC * 100

            count, ar = count_storms(da, -40, pixel)
            dic40[m].append(count)
            if area:
                append_area(area40, m, ar)

            count, ar = count_storms(da, -50, pixel)
            dic50[m].append(count)
            if area:
                append_area(area50, m, ar)

            count, ar = count_storms(da, -60, pixel, debug_tag=m)
            dic60[m].append(count)
            if area:
                append_area(area60, m, ar)

            count, ar = count_storms(da, -70, pixel)
            dic70[m].append(count)
            if area:
                append_area(area70, m, ar)

            count, ar = count_storms(da, -75, pixel)
            dic75[m].append(count)
            if area:
                append_area(area75, m, ar)

    print(40, dic40[3])
    print(50, dic50[3])
    print(60, dic60[3])
    print(70, dic70[3])
    print(75, dic75[3])

    pkl.dump(dic40, open(cnst.network_data + 'data/CLOVER/saves/storm_count_10W-12E_5-8N_-40C_5000km2_18.p', #4f5-8f5N
                        'wb'))

    pkl.dump(dic50, open(cnst.network_data + 'data/CLOVER/saves/storm_count_10W-12E_5-8N_-50C_5000km2_18.p', #4f5-8f5N
                        'wb'))

    pkl.dump(dic60, open(cnst.network_data + 'data/CLOVER/saves/storm_count_10W-12E_5-8N_-60C_5000km2_18.p',
                         'wb'))

    pkl.dump(dic70, open(cnst.network_data + 'data/CLOVER/saves/storm_count_10W-12E_5-8N_-70C_5000km2_18.p',
                         'wb'))

    pkl.dump(dic75, open(cnst.network_data + 'data/CLOVER/saves/storm_count_10W-12E_5-8N_-75C_5000km2_18.p',
                        'wb'))

    if area:

        pkl.dump(area40, open(cnst.network_data + 'data/CLOVER/saves/storm_90centArea_12W-10E_5-8N_-40C_5000km2_1800.p', #4f5-8f5N
                            'wb'))

        pkl.dump(area50, open(cnst.network_data + 'data/CLOVER/saves/storm_90centArea_12W-10E_5-8N_-50C_5000km2_1800.p', #4f5-8f5N
                            'wb'))

        pkl.dump(area60, open(cnst.network_data + 'data/CLOVER/saves/storm_90centArea_12W-10E_5-8N_-60C_5000km2_1800.p',
                             'wb'))

        pkl.dump(area70, open(cnst.network_data + 'data/CLOVER/saves/storm_90centArea_12W-10E_5-8N_-70C_5000km2_1800.p',
                             'wb'))

        pkl.dump(area75, open(cnst.network_data + 'data/CLOVER/saves/storm_90centArea_12W-10E_5-8N_-75C_5000km2_1800.p',
                            'wb'))