Example #1
import numpy as np
from numpy import fft  # (assumed) the snippet uses numpy's FFT routines
from eofs.xarray import Eof


def PDO(dat1, dat2):
    solver = Eof(dat1)
    pc1 = solver.pcs(npcs=1, pcscaling=1)
    sigs = solver.eigenvalues()
    eofs = solver.eofs()
    eof1 = eofs[0]
    # normalise the eigenvalues so they sum to one
    sigs1 = sigs / sigs.sum()
    # not used: eof1 = solver.eofsAsCorrelation(neofs=1)
    #           pcs = solver.pcs()

    # low-pass filter the first PC
    Nt = dat1.time.size
    fp = fft.fft(pc1[:, 0])
    x = fft.fftfreq(Nt, 1 / 12.)  # frequencies in cycles per year (monthly data)
    # low-pass filter: keep frequencies of at most 0.1 cycles per year
    i = abs(x) <= 0.1
    fp_fil = fp * i
    # plt.plot(x, abs(fp))
    # plt.plot(x, abs(fp_fil))
    pfil = fft.ifft(fp_fil)
    # keep the coordinates of pc1 but replace its values with the filtered series
    pc1_fil = pc1[:, 0] * 0 + np.real(pfil)
    # plt.plot(pc1_fil)
    # print(pc1)
    print(pc1_fil)
    tmp = np.imag(pfil)
    print(tmp.max())  # the imaginary part should be negligible

    # regress the SST field onto the filtered PC (regresst is a user-supplied helper)
    astd, bstd, abstd, bhat = regresst(pc1_fil, dat2)
    r = abstd / (astd * bstd)
    slope = r * bstd

    return sigs1, eof1, slope, pc1, pc1_fil, x, fp_fil
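
# Usage sketch (editor's illustration; `sst_np` and `sst_glob` are assumed monthly SST
# anomaly DataArrays, and `regresst` is the author's regression helper, not shown here):
# sigs1, eof1, slope, pc1, pc1_fil, x, fp_fil = PDO(sst_np, sst_glob)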
Example #2
    def EOF_SST_analysis(self, xa, weights, n=1, fn=None):
        """ Empirical Orthogonal Function analysis of an SST(t,x,y) field; from `SST.py` """
        assert isinstance(xa, xr.DataArray)
        assert isinstance(weights, xr.DataArray)
        assert 'time' in xa.dims
        assert np.shape(xa[0, :, :]) == np.shape(weights)

        # anomalies: remove the time mean
        xa = xa - xa.mean(dim='time')
        # Retrieve the leading EOFs, expressed as the covariance between the leading PC
        # time series and the input anomalies at each grid point.
        solver = Eof(xa, weights=weights)
        eofs = solver.eofsAsCovariance(neofs=n)
        pcs  = solver.pcs(npcs=n, pcscaling=1)
        eigs = solver.eigenvalues(neigs=n)
        varF = solver.varianceFraction(neigs=n)
        ds = xr.merge([eofs, pcs, eigs, varF])
        if fn is not None:
            ds.to_netcdf(fn)
        return ds
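
    # Usage sketch (editor's illustration; `obj`, `sst`, and the coordinate names are
    # assumptions, not from the original source). Area weights are typically sqrt(cos(lat)):
    # wgt = np.sqrt(np.cos(np.deg2rad(sst.lat))) * xr.ones_like(sst.isel(time=0))
    # eof_ds = obj.EOF_SST_analysis(sst, wgt, n=2, fn='sst_eofs.nc')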
Example #3
# (assumed) imports used by this snippet
import matplotlib.pyplot as plt
from eofs.xarray import Eof

# center=True (the default) removes the time mean before the analysis;
# center=False leaves the data as supplied.
'''
From the eofs documentation for the *center* keyword:

If *True*, the mean along the first axis of *dataset* (the time-mean) will be
removed prior to analysis. If *False*, the mean along the first axis will not
be removed. Defaults to *True* (mean is removed).

The covariance interpretation relies on the input data being anomaly data with
a time-mean of 0. Therefore this option should usually be set to *True*.
Setting this option to *True* has the useful side effect of propagating missing
values along the time dimension, ensuring that a solution can be found even if
missing values occur in different locations at different times.
'''
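# (editor's addition) the `solver` used below is assumed to come from a call like this,
# where `sst_anom` is a placeholder for the analysed anomaly DataArray:
solver = Eof(sst_anom, center=True)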
lambdas = solver.eigenvalues()
vf = solver.varianceFraction()
Nerror = solver.northTest(vfscaled=True)
pcs = solver.pcs()  #(time, mode)
eofs = solver.eofsAsCovariance()
'''
plt.figure()
plt.subplot(3,2,1)
pcs[:, 0].plot()#color='b', linewidth=2)
ax = plt.gca()
ax.axhline(0, color='k')
ax.set_xlabel('Year')
ax.set_ylabel('PC1 amplitude')
plt.grid()
plt.subplot(3,2,2)
pcs[:, 1].plot()
'''

Example #4
import datetime
import os
import xarray as xr
from eofs.xarray import Eof

# Read preprocessed data.
DATA_FILE = "/LFASGI/sandroal/data_sets/GIMMS/ppdata_ndvi.nc" 
DS = xr.open_dataset(DATA_FILE)

# Create an EOF solver to do the EOF analysis. Memory intensive operation.
solver = Eof(DS.ndvi)

# Retrieve EOFs, principal component time series, fraction of explained 
# variance, and eigenvalues as xarray DataArray objects for all modes.
EOFs = solver.eofs() 
PCs = solver.pcs()  
FRACs = solver.varianceFraction() 
EIGs = solver.eigenvalues() 

# Attributes for xarray DataSet objects.
attrs = {}
attrs["Description"] = "Empirical orthogonal functions to NDVI (GIMMS) " + \
                       "in its original temporal and spatial resolutions"
attrs["Build"] = "By Alex Araujo"
attrs["Date"] = datetime.datetime.now().strftime("%B %d, %Y; %Hh:%Mmin:%Ss")
attrs["Source"] = os.path.abspath(__file__)

# Set these attributes on the results. The DataArray objects must be converted to
# Datasets before the results are exported as netCDF files.
DAs = [EOFs, PCs, FRACs, EIGs]
names = ["eofs", "pcs", "fracs", "eigs"]
files = ["ppdata_ndvi_eofs_eofs.nc", 
         "ppdata_ndvi_eofs_pcs.nc",
Example #5
import numpy as np
from scipy import signal  # (assumed) used by the commented-out detrending below
from eofs.xarray import Eof

'''
for la in range(0,len(lat_obs)):
    for lo in range(0,len(lon_obs)):
        valid = ~np.isnan(sst_obs[:,la,lo])
        if (valid.any()==True):
            sst_obs[:,la,lo] = signal.detrend(sst_obs[:,la,lo], axis=0, \
                                               type='linear')
        elif (valid.all()==False):
            sst_obs[:,la,lo] = np.nan
'''

# EOF for model
coslat_mdl = np.cos(np.deg2rad(sst_mdl.coords['lat'].values))
wgts_mdl = np.sqrt(coslat_mdl)[..., np.newaxis]
solver_mdl = Eof(sst_mdl, weights=wgts_mdl, center=True)
lambdas_mdl = solver_mdl.eigenvalues()
vf_mdl = solver_mdl.varianceFraction()
Nerror_mdl = solver_mdl.northTest(vfscaled=True)
pcs_mdl = solver_mdl.pcs() #(time, mode)
eofs_mdl = solver_mdl.eofs()
# EOF for obs
coslat_obs = np.cos(np.deg2rad(sst_obs.coords['lat'].values))
wgts_obs = np.sqrt(coslat_obs)[..., np.newaxis]
solver_obs = Eof(sst_obs, weights=wgts_obs, center=True)
lambdas_obs = solver_obs.eigenvalues()
vf_obs = solver_obs.varianceFraction()
Nerror_obs = solver_obs.northTest(vfscaled=True)
pcs_obs = solver_obs.pcs() #(time, mode)
eofs_obs = solver_obs.eofs()
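
# Sketch (editor's addition): the North et al. rule of thumb treats neighbouring modes as
# well separated when the gap between their variance fractions exceeds the sampling error:
# gap = vf_mdl.values[:-1] - vf_mdl.values[1:]
# well_separated = gap > Nerror_mdl.values[:-1]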

Example #6

    # (assumed) explicit imports for the pylab-style names used below
    from numpy import arctan, sqrt, pi, hstack
    import matplotlib.pyplot as plt
    from matplotlib.pyplot import hist2d, colorbar
    from matplotlib.patches import Ellipse
    import xarray as xray  # old name of the xarray package
    uvmat = xray.concat((umeana, vmeana), dim='uv')

    from eofs.xarray import Eof
    solver = Eof(uvmat.isel(distance=ii).T)

    evec1x = solver.eofs(neofs=1)[0][0].values
    evec1y = solver.eofs(neofs=1)[0][1].values

    maxtheta_rad = arctan(evec1y / evec1x)
    maxtheta = arctan(evec1y / evec1x) * 180 / pi
    maxtheta

    eig1 = solver.eigenvalues()[0].values
    eig2 = solver.eigenvalues()[1].values
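    # (editor's note) 5.991 is the 95th percentile of a chi-squared distribution with two
    # degrees of freedom, so maxvar and minvar below are the full axis lengths of the 95%
    # variance ellipse.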
    maxvar = 2 * sqrt(5.991 * eig1)
    minvar = 2 * sqrt(5.991 * eig2)

    fig, ax = plt.subplots(subplot_kw={'aspect': 'equal'})
    hist2d(umean[ii, :], vmean[ii, :], 30)
    colorbar()
    e = Ellipse(xy=hstack(
        (umean.mean(dim='date')[ii], vmean.mean(dim='date')[ii])),
                width=maxvar,
                height=minvar,
                angle=maxtheta,
                edgecolor='w',
                facecolor='none',
                lw=3)
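
    # (editor's sketch) the ellipse still has to be attached to the axes to be drawn:
    # ax.add_artist(e)
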
def LFCA(da, N=30, L=1/10, fs=12, order=3, landmask=None, monthly=True):
    """Perform LFCA (as per Wills et al, 2018, GRL) on a dataarray.

    Parameters
    ----------
    da : xarray.DataArray
        Data to perform LFCA on (time x lat x lon)
    N : int
        Number of EOFs to retain
    L : float
        Cutoff frequency for the lowpass filter, in cycles per year (e.g. 1/10 for a
        decadal cutoff)
    fs : float
        Sampling frequency in samples per year (12 for monthly data)
    order : int
        Order of the Butterworth filter
    landmask : xarray.DataArray or None
        If None, do not perform any masking
        If DataArray, indicates land locations
    monthly : bool
        If True, perform lowpass filtering for each month separately

    Returns
    -------
    LFPs : numpy.ndarray
        2D array of N spatial patterns (nlat*nlon x N)
    LFCs : numpy.ndarray
        2D array of N time series (ntime x N)

    """

    import numpy as np
    from scipy.signal import butter, sosfiltfilt
    from eofs.xarray import Eof

    # remove empirical seasonal cycle
    da = da.groupby('time.month') - da.groupby('time.month').mean('time')

    ntime, nlat, nlon = da.shape

    if landmask is not None:

        # expand the land mask along the time dimension
        lnd_mask = np.repeat(landmask.values[np.newaxis, :, :], ntime, axis=0)
        da = da.where(lnd_mask)

    coslat = np.cos(np.deg2rad(da['lat'].values)).clip(0., 1.)
    wgts = np.sqrt(coslat)[..., np.newaxis]
    solver = Eof(da, weights=wgts)

    eofs = solver.eofs(eofscaling=0)  # eigenvectors, normalised so that the L2 norm = 1
    eigenvalues = solver.eigenvalues()

    # Low pass filter data
    if monthly:
        fs = 1

    nyq = 0.5 * fs  # Nyquist frequency
    low = L / nyq
    sos = butter(order, low, btype='low', output='sos')  # Coefficients for Butterworth filter
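    # sosfiltfilt applies the filter forward and then backward, so the filtering below is
    # zero-phase (no time shift in X_tilde).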
    if monthly:
        X_tilde = np.empty((da.shape))
        for kk in range(12):
            X_tilde[kk::12, :, :] = sosfiltfilt(sos, da.values[kk::12, :, :], padtype='even', axis=0)

    else:
        X_tilde = sosfiltfilt(sos, da.values, axis=0)

    a_k = eofs.values[:N, :, :].reshape((N, nlat*nlon))
    sigma_k = np.sqrt(eigenvalues.values[:N])

    if landmask is not None:
        lnd_mask_vec = landmask.values.flatten()
    else:
        lnd_mask_vec = np.ones((nlat*nlon,), dtype=bool)

    PC_tilde = np.empty((ntime, N))
    for kk in range(N):
        PC_tilde[:, kk] = 1/sigma_k[kk]*np.dot(X_tilde.reshape((ntime, nlat*nlon))[:, lnd_mask_vec],
                                               a_k[kk, lnd_mask_vec])

    R = np.dot(PC_tilde.T, PC_tilde)/(N - 1)
    R_eigvals, e_k = np.linalg.eig(R)  # NB: np.linalg.eig does not guarantee sorted eigenvalues

    # eigenvectors are in the columns of e_k
    u_k = np.dot((a_k.T)/sigma_k, e_k)
    LFPs = np.dot(sigma_k*(a_k.T), e_k)

    # Time series:
    LFCs = np.dot(da.values.reshape((ntime, nlat*nlon))[:, lnd_mask_vec], u_k[lnd_mask_vec, :])

    return LFPs, LFCs
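
# Usage sketch (editor's illustration; `sst` stands for an assumed monthly (time, lat, lon)
# SST DataArray and is not part of the original source):
# LFPs, LFCs = LFCA(sst, N=30, L=1/10, landmask=None, monthly=True)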