Code example #1
    # mask land/fill values, then detrend and deseason each grid point
    SST = np.array(SST)
    SST[SST < -100] = np.nan
    SST_flat = SST.reshape(len(tim), X * Y)
    dsst = np.empty((len(tim), X * Y))
    dsst.fill(np.nan)
    tt = np.arange(0, len(tim))
    for i in range(0, len(SST_flat[0, :])):
        valid = ~np.isnan(SST_flat[:, i])
        if valid.any():
            # remove the linear trend (least-squares fit)
            y = SST_flat[:, i]
            mean, trend, alpha = eo.trend(tt, y)
            dsst[:, i] = y - (tt * trend) - mean
#            dsst[:, i] = signal.detrend(SST_flat[valid, i], axis=0, type='linear')
        elif not valid.all():
            dsst[:, i] = np.nan
        # remove the seasonal cycle (4 harmonics, 12 time steps per year)
        tmp_dsea, sea, beta = eo.deseason_harmonic(dsst[:, i], 4, 12)
        dsst[:, i] = np.squeeze(tmp_dsea[:, 0])
    SSTa_TMm = dsst.reshape(len(tim), Y, X)

#figfile ='/v_Munk_Drive/ecougnon/ana/InBand_Variance/InBandVar_SmoothPeriodogram_Scaling2TotVar_1-3-7-10.png'
#figfile_ ='/v_Munk_Drive/ecougnon/ana/InBand_Variance/InBandVar_SmoothPeriodogram_Scaling2TotVar_1-3-7-10.eps'

figfile_ = '/home/ecougnon/Documents/PotPred_paper/figures/InBandVar_SmoothPeriodogram_Scaling2TotVar_1-3-7-10_HadISST_1871.eps'

###################################
# calculate the power spectral density
###################################

var_tot = np.var(SSTa_TMm, axis=0)
min_tot = 0  #np.nanmin(var_tot)
max_tot = np.nanmax(var_tot)
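
The helper functions eo.trend and eo.deseason_harmonic come from an external module that is not shown in these examples, so their internals are an assumption. Below is a minimal sketch of what equivalent routines could look like, assuming eo.trend returns (intercept, slope, error estimate) from an ordinary least-squares fit and eo.deseason_harmonic returns (deseasoned series, seasonal cycle, harmonic coefficients) with the first two as column arrays, consistent with how the outputs are indexed above; the names trend_sketch and deseason_harmonic_sketch are hypothetical.

import numpy as np

def trend_sketch(t, y):
    # hypothetical stand-in for eo.trend: ordinary least-squares linear fit
    # returns (intercept, slope, residual standard deviation)
    valid = ~np.isnan(y)
    A = np.vstack([np.ones(valid.sum()), t[valid]]).T
    coef, _, _, _ = np.linalg.lstsq(A, y[valid], rcond=None)
    intercept, slope = coef
    resid = y[valid] - A.dot(coef)
    return intercept, slope, np.std(resid)

def deseason_harmonic_sketch(y, K, period):
    # hypothetical stand-in for eo.deseason_harmonic: fit K annual harmonics
    # (period = time steps per year) by least squares and subtract them
    t = np.arange(len(y))
    cols = [np.ones(len(y))]
    for k in range(1, K + 1):
        cols += [np.cos(2 * np.pi * k * t / period),
                 np.sin(2 * np.pi * k * t / period)]
    A = np.vstack(cols).T
    valid = ~np.isnan(y)
    beta, _, _, _ = np.linalg.lstsq(A[valid], y[valid], rcond=None)
    season = A.dot(beta)
    dsea = (y - season)[:, np.newaxis]  # column array, as indexed above
    return dsea, season[:, np.newaxis], beta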
Code example #2

ds_hadisst_clim = ds_hadisst_mean.sel(time=slice('1961-01-01','1990-12-31'))
tim_vec_hadisst = pd.date_range('1871-01-01','2017-12-31',name='time',freq='M')
## area average (computed on the regular 1 deg grid without area weighting;
# adjust if the average needs to be weighted by grid-cell area)
# mean over 1961-1990
ds_hadisst_19611990 = ds_hadisst.sel(time=slice('1961-01-01','1990-12-31')). \
                                 mean(dim=('time','longitude','latitude'))
ds_hadisst_19822005 = ds_hadisst.sel(time=slice('1982-01-01','2005-12-31')). \
                                 mean(dim=('time','longitude','latitude'))
ds_hadisst_offset = ds_hadisst_19822005 - ds_hadisst_19611990

if plot == 'ssta':
    # get anomalies by removing a climatology based on 1961-1990
    dsea_tmp1, sea_tmp1, beta = eo.deseason_harmonic(np.array(ds_hadisst_clim), \
                                                     2, 12)
    dsea_hadisst = np.empty(len(ds_hadisst_mean))
    dsea_hadisst.fill(np.nan)
    # subtract the mean seasonal cycle month by month over the full record
    for tt in range(0, len(ds_hadisst_mean), 12):
        for mm in range(0, 12):
            dsea_hadisst[tt + mm] = np.array(ds_hadisst_mean[tt + mm]) - sea_tmp1[mm]
#############################################
# running standard deviation
#############################################
hadisst_std_30 = np.empty(len(ds_hadisst_mean))
hadisst_std_30.fill(np.nan)
#hadissta_std_30 = np.empty(len(dsea_hadisst))
#hadissta_std_30.fill(np.nan)
w = 30  # window length in time steps
# roughly centred running standard deviation; windows near the end of the
# record are shorter than w because the slice is truncated
for tt in range(0, len(tim_vec_hadisst) - int(w / 2)):
    hadisst_std_30[tt + int(w / 2)] = np.nanstd(ds_hadisst_mean[tt:tt + w])
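
For comparison only (not part of the original script), the same running statistic can be sketched with a pandas rolling window, reusing ds_hadisst_mean and tim_vec_hadisst from the snippet above. Note that pandas uses ddof=1 for the standard deviation while np.nanstd defaults to ddof=0, and the centring differs from the manual loop by one sample, so the result is only approximately the same.

import numpy as np
import pandas as pd

# wrap the area-averaged monthly series from above in a pandas Series
series = pd.Series(np.asarray(ds_hadisst_mean), index=tim_vec_hadisst)

# centred 30-sample rolling standard deviation; min_periods=1 keeps
# (shorter) partial windows at the start and end of the record
rolling_std_30 = series.rolling(window=30, center=True, min_periods=1).std()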
Code example #3
#
icnt = 0
for i in lon_id:
    t_lon = time.time()
    print(icnt + 1, 'of', len(lon_map))
    #   load SST
    matobj = io.loadmat(header + 'timeseries/avhrr-only-v2.ts.' + \
                        str(i+1).zfill(4) + '.mat')

    ds_sst = xr.Dataset({'sst_ts': (('lat', 'time'),
                                    matobj['sst_ts'][lat_id, :])},
#                                   matobj['sst_ts'][lat_id, str_mdl:end_mdl])},
                        {'time': time_vec, 'lat': matobj['lat'][lat_id, 0]})
    # deseason each latitude (4 harmonics, daily data)
    for j in range(0, len(lat_id)):
        tmp_sea, sea, beta = eo.deseason_harmonic(np.array(ds_sst['sst_ts'][j, :]),
                                                  4, 365)
        ds_sst['sst_ts'][j, :] = np.array(tmp_sea)[:, 0]
        '''
# detrend the time series
        valid = ~np.isnan(ds_sst['sst_ts'][j,:])
        if (valid.any()==True):
            ds_sst['sst_ts'][j,:] = signal.detrend(ds_sst['sst_ts'][j,:],axis=0, \
                                                   type='linear')
        elif (valid.all()==False):
            ds_sst['sst_ts'][j,:] = np.nan
        '''
        '''
# detrend the time series -- not working with full nans!!!
    tmp_sst_det = signal.detrend(ds_sst['sst_ts'],axis=1,type='linear')
#ds_sst.reduce(signal.detrend, dim='time', type='linear')
# using xr.reduce, we lose the time coordinate
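
The commented-out detrending above breaks when a whole series is NaN (land points), as the author's note says. One way to sidestep that, sketched here purely as an illustration (detrend_nan_safe is not a function from the original scripts), is to detrend only the valid samples and leave the NaN gaps untouched:

import numpy as np
from scipy import signal

def detrend_nan_safe(y):
    # linear detrend that tolerates NaNs: all-NaN (or nearly empty) series
    # come back unchanged as NaNs, otherwise only the valid samples are
    # detrended and the NaN gaps are preserved in place
    y = np.asarray(y, dtype=float)
    out = np.full_like(y, np.nan)
    valid = ~np.isnan(y)
    if valid.sum() < 2:  # not enough points to fit a trend
        return out
    out[valid] = signal.detrend(y[valid], type='linear')
    return out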
Code example #4
ds = xr.open_dataset(outfile)['sst']

SSTa = xr.Dataset(
    {
        'SSTa': (('time', 'lat', 'lon'),
                 np.zeros((ds.shape[0], ds.shape[1], ds.shape[2])))
    },
    coords={
        'time': ds.time,
        'lat': ds.lat,
        'lon': ds.lon
    })
tot_pts = len(ds.lat) * len(ds.lon)
k = 0
for ll in range(0, len(ds.lat)):
    for ln in range(0, len(ds.lon)):
        #        t_key = time.time()
        # deseason each grid point (4 harmonics, daily data); the function
        # returns (anomaly, seasonal cycle, coefficients)
        tmp_sea, sea, beta = eo.deseason_harmonic(np.array(ds[:, ll, ln]), 4, 365)
        SSTa['SSTa'][:, ll, ln] = np.array(tmp_sea)[:, 0]
    # progress report after each row of longitudes
    tmp = (k + len(ds.lon)) / tot_pts
    k = k + len(ds.lon)
    print('fraction of points processed:', tmp)
#        elapsed_key = time.time() - t_key
#        print('elapsed time for each key:', elapsed_key)

SSTa.to_netcdf(outfile2)

elapsed_all = time.time() - t_ini
print('elapsed time since start:', elapsed_all)
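
The double loop over grid points above can also be expressed with xarray's apply_ufunc, shown here as a possible alternative rather than the original approach. deseason_1d is a hypothetical wrapper and assumes eo.deseason_harmonic returns the anomaly as its first output (a column array), as in the loops above.

import numpy as np
import xarray as xr

def deseason_1d(y):
    # wrapper that returns only the deseasoned series as a 1-D array
    dsea, sea, beta = eo.deseason_harmonic(y, 4, 365)
    return np.asarray(dsea)[:, 0]

# apply the 1-D routine along 'time' for every (lat, lon) point; the core
# dimension is moved to the end, so transpose back afterwards
ssta = xr.apply_ufunc(deseason_1d, ds,
                      input_core_dims=[['time']],
                      output_core_dims=[['time']],
                      vectorize=True).transpose('time', 'lat', 'lon')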
Code example #5
    # (i+1) accounts for the difference between MATLAB
    # and Python (0-based) indexing
    jcnt_id = 0
    for j in lat_id:
        #        t_lat = time.time()
        sst_ts = matobj['sst_ts'][j][:]
        # remove the 29th of February from the time series
        sst_ts_ = np.delete(sst_ts, idx_leap)
        # skip land points (any NaN in the series); does not handle an empty array
        if not np.isnan(sst_ts_).any():

            # detrend the data using a linear least-squares fit
            dsst = signal.detrend(sst_ts_, axis=0, type='linear')
            # deseason (6 harmonics, daily data)
            dsea_sst, season, beta = eo.deseason_harmonic(dsst, 6, 365)
            # allocate memory
            ## time series including only the chunks
            #            dsst_chunk=np.empty(tau*NumChunk)
            #            dsst_chunk.fill(np.nan)
            ## filtered chunk time series
            #            dsst_box=np.empty(len(dsst_chunk))
            #            dsst_box.fill(np.nan)
            # chunk mean allocation
            t_j = np.empty(NumChunk)
            t_j.fill(np.nan)
            # periodogram of each chunk
            pxx_j = np.empty([NumChunk, int(tau / 2) + 1])
            pxx_j.fill(np.nan)  # power spectrum of each chunk
            f_j = np.empty([NumChunk, int(tau / 2) + 1])
            f_j.fill(np.nan)  # corresponding frequencies
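
The arrays above are presumably filled by a loop over the chunks; that loop is not included in this excerpt, so the sketch below is an assumption. It splits the deseasoned series (assumed to be a column array of length NumChunk * tau) into consecutive segments of length tau and computes a raw periodogram for each with scipy; fs=365 gives frequencies in cycles per year for daily data.

import numpy as np
from scipy import signal

# hypothetical continuation: fill the pre-allocated arrays chunk by chunk
dsea_1d = np.asarray(dsea_sst)[:, 0]      # deseasoned series as a 1-D array
for c in range(NumChunk):
    chunk = dsea_1d[c * tau:(c + 1) * tau]
    t_j[c] = np.nanmean(chunk)            # chunk mean
    # raw one-sided periodogram; output length is tau // 2 + 1, matching
    # the allocation above
    f_j[c, :], pxx_j[c, :] = signal.periodogram(chunk, fs=365)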