Code example #1
    def EOF_projection(self, nEOFs, store_EOFs=True):
        from eofs.iris import Eof

        weighting_type = 'coslat'  # square-root-of-cos(latitude) weighting so each grid point's variance contribution is area weighted
        solver = Eof(self.data, weights=weighting_type)
        self.PCs = solver.pcs(npcs=nEOFs)
        self.history.append(f"Leading {nEOFs} PCs calculated.")
        if store_EOFs:
            self.EOFs = solver.eofs(eofscaling=1, neofs=nEOFs)
            self.history.append(f"Leading {nEOFs} EOFs calculated.")
Code example #2
def NAO_EOFs(SLP, neofs, time, months):
    # Restrict to the NAO domain (constraints are defined at module level)
    # and keep only the requested months, averaged to one value per year.
    SLP_NAO = SLP.extract(lat_constraint_EOF & lon_constraint_EOF)
    SLP_djf = SLP_NAO.extract(iris.Constraint(month=months))
    SLP_djf = SLP_djf.aggregated_by('year', iris.analysis.MEAN)
    # Standardise in time before the EOF analysis.
    mean = SLP_djf.collapsed(time, iris.analysis.MEAN)
    stdev = SLP_djf.collapsed(time, iris.analysis.STD_DEV)
    SLP_djf = (SLP_djf - mean) / stdev
    solver = Eof(SLP_djf, weights='coslat')
    eof = solver.eofs(neofs=neofs)
    # The NAO index is the leading standardised PC time series.
    NAO_djf = solver.pcs(npcs=neofs, pcscaling=1)[:, 0]
    return NAO_djf, eof
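lat_constraint_EOF and lon_constraint_EOF are defined elsewhere in the originating script. A minimal sketch of what such module-level constraints could look like; the exact NAO domain bounds below are an assumption, not taken from the original.

import iris

# Hypothetical NAO analysis domain (roughly 20-80N, 90W-40E); adjust as needed.
lat_constraint_EOF = iris.Constraint(latitude=lambda cell: 20.0 <= cell <= 80.0)
lon_constraint_EOF = iris.Constraint(longitude=lambda cell: -90.0 <= cell <= 40.0)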
Code example #3
File: utils.py  Project: sureL89/pySDP
import numpy as np
from eofs.iris import Eof


def eof_pc_modes(cube, fraction_explained, neofs_pred=None, show_info=False):
    n_eofs = 0
    n_fraction = 0
    solver = Eof(cube, weights='coslat')
    if neofs_pred is None:
        # Number of EOFs needed to explain the fraction (fraction_explained) of the total variance
        while n_fraction < fraction_explained:
            n_eofs = n_eofs+1
            cube.eof_var = solver.varianceFraction(neigs=n_eofs)
            n_fraction = np.sum(cube.eof_var.data)
        cube.eof = solver.eofs(neofs=n_eofs)
        cube.pcs = solver.pcs(npcs=n_eofs)
        cube.solver = solver
        cube.neofs = n_eofs
    else:
        cube.eof = solver.eofs(neofs=neofs_pred)
        cube.pcs = solver.pcs(npcs=neofs_pred)
        cube.solver = solver
        cube.neofs = neofs_pred

    # Function return
    if show_info:
        for i in range(n_eofs):
            print('EOF {} fraction: {:.2f}%'.format(i + 1, cube.eof_var.data[i] * 100))
        print('{:.2f}% of the total variance explained by {} EOF modes.'.format(n_fraction * 100, n_eofs))
        return cube
    elif show_info == False:
        return cube
    else:
        print('Missing show_info=True or show_info=False')
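eof_pc_modes expects an iris cube of anomalies and attaches the solver output to it. A minimal, hypothetical usage sketch; the file name and the 90% variance target are placeholders, not from the original project.

import iris

# Keep as many EOF modes as are needed to explain 90% of the total variance.
# cube = iris.load_cube('slp_anomalies.nc')
# cube = eof_pc_modes(cube, fraction_explained=0.90, show_info=True)
# print(cube.neofs, cube.pcs.shape, cube.eof.shape)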
Code example #4
File: eof.py  Project: pcosbsc/scripts_UB
iris.coord_categorisation.add_year(sst, 'time', name='year')
yr_sst = sst.aggregated_by('year', iris.analysis.MEAN)
clim_sst = yr_sst.collapsed('time', iris.analysis.MEAN)

# yr_sst_detrend = detrend(yr_sst)
# clim_sst_detrend = yr_sst_detrend.collapsed('time', iris.analysis.MEAN)

anom_sst = yr_sst - clim_sst
anom_sst_detrend = detrend(anom_sst)

# Create an EOF solver to do the EOF analysis. Square-root of cosine of
# latitude weights are applied before the computation of EOFs.
# solver = Eof(anom_sst_detrend)  # --> does not work: yr_sst_detrend already appears to be an anomaly, and removing the linear trend there wipes out the whole signal.
solver = Eof(anom_sst, weights='coslat')

# Retrieve the leading EOF, expressed as the correlation between the leading
# PC time series and the input SST anomalies at each grid point, and the
# leading PC time series itself.
n = 3
eofs = solver.eofsAsCorrelation(neofs=n)
pcs = solver.pcs(npcs=n)
variance_fractions = solver.varianceFraction(neigs=n)
print(variance_fractions.data)

for i in range(n):
    plotter(eofs[i],pcs[:,i],variance_fractions.data[i]*100,i+1)

# plt.show()
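detrend (and the plotter routine) are defined elsewhere in this project. A sketch of what the detrending step might look like, assuming a linear detrend of the cube data along the time axis; this is an assumption, not the project's actual helper.

from scipy import signal


def detrend(cube, axis=0):
    """Hypothetical stand-in: remove a linear trend along the time axis of a cube."""
    detrended = cube.copy()
    detrended.data = signal.detrend(cube.data, axis=axis, type='linear')
    return detrended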
Code example #5
File: regr_eof.py  Project: pcosbsc/scripts_UB
# lat_bds = [20, 70]
# lon_bds = [120, 250]
# SP
# lat_bds = [-70, -20]
# lon_bds = [135, 300]
# TP
# lat_bds = [0, 90]
# lon_bds = [0, 360]
# sst = extract_region(cube, lat_bds, lon_bds)
if True:
    # anom_sst_detrend = anomaly(sst_cube, sst=True)
    anom_slp_detrend = anomaly(slp_cube)

    # Create an EOF solver to do the EOF analysis. Square-root of cosine of
    # latitude weights are applied before the computation of EOFs.
    solver = Eof(anom_slp_detrend, weights='coslat')
    # Retrieve the three leading EOFs of the SLP anomalies and the
    # corresponding PC time series.
    n = 3
    # eofs = solver.eofsAsCorrelation(neofs=n)  # alternative: EOFs as correlation maps
    eofs = solver.eofs(neofs=n)
    print(eofs)
    pcs = solver.pcs(npcs=n)
    pc0 = pcs.coord('year').points
    pc1 = pcs[:, 0].data / np.std(pcs[:, 0].data)
    pc2 = (pcs[:, 1].data * -1) / np.std(pcs[:, 1].data)
    pc3 = pcs[:, 2].data / np.std(pcs[:, 2].data)
    pcs_np = np.array([pc1, pc2, pc3])

    variance_fractions = solver.varianceFraction(neigs=n)
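The sign flip applied to the second PC above is a reminder that EOF/PC signs are arbitrary; a flipped PC is only consistent with its spatial pattern if the corresponding EOF is flipped too, for example (illustrative line, not in the original script):

# Keep EOF2 consistent with the sign convention chosen for pc2 above.
eof2_flipped = eofs[1] * -1.0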
Code example #6
File: hgt_example.py  Project: gavin971/eofs

import warnings

import cartopy.crs as ccrs
import iris
import iris.plot as iplt
import matplotlib.pyplot as plt

from eofs.iris import Eof
from eofs.examples import example_data_path


# Read geopotential height data using the iris module. The file contains
# December-February averages of geopotential height at 500 hPa for the
# European/Atlantic domain (80W-40E, 20-90N).
filename = example_data_path('hgt_djf.nc')
z_djf = iris.load_cube(filename)

# Compute anomalies by removing the time-mean.
with warnings.catch_warnings():
    # Iris emits a warning due to the non-contiguous time dimension.
    warnings.simplefilter('ignore', UserWarning)
    z_djf_mean = z_djf.collapsed('time', iris.analysis.MEAN)
z_djf.data = z_djf.data - z_djf_mean.data

# Create an EOF solver to do the EOF analysis. Square-root of cosine of
# latitude weights are applied before the computation of EOFs.
solver = Eof(z_djf, weights='coslat')

# Retrieve the leading EOF, expressed as the covariance between the leading PC
# time series and the input geopotential height anomalies at each grid point.
eof1 = solver.eofsAsCovariance(neofs=1)

# Plot the leading EOF expressed as covariance in the European/Atlantic domain.
ax = plt.axes(projection=ccrs.Orthographic(central_longitude=-20, central_latitude=60))
iplt.contourf(eof1[0, 0], cmap=plt.cm.RdBu_r)
ax.coastlines()
ax.set_global()
ax.set_title('EOF1 expressed as covariance', fontsize=16)
plt.show()
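An optional follow-on, not part of the original example: the same solver object can project anomaly fields onto the computed EOFs with projectField.

# Projecting the input anomalies back onto the leading EOF gives a pseudo-PC
# series that should closely match solver.pcs(npcs=1) for the training data.
pseudo_pc1 = solver.projectField(z_djf, neofs=1)
print(pseudo_pc1.shape)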
Code example #7
File: sst_example.py  Project: nicolasfauchereau/eofs
import cartopy.crs as ccrs
import cartopy.feature
import iris
import iris.plot as iplt
import matplotlib.pyplot as plt
import numpy as np

from eofs.iris import Eof
from eofs.examples import example_data_path


# Read SST anomalies using the iris module. The file contains November-March
# averages of SST anomaly in the central and northern Pacific.
filename = example_data_path('sst_ndjfm_anom.nc')
sst = iris.load_cube(filename)

# Create an EOF solver to do the EOF analysis. Square-root of cosine of
# latitude weights are applied before the computation of EOFs.
solver = Eof(sst, weights='coslat')

# Retrieve the leading EOF, expressed as the correlation between the leading
# PC time series and the input SST anomalies at each grid point, and the
# leading PC time series itself.
eof1 = solver.eofsAsCorrelation(neofs=1)
pc1 = solver.pcs(npcs=1, pcscaling=1)

# Plot the leading EOF expressed as correlation in the Pacific domain.
clevs = np.linspace(-1, 1, 11)
ax = plt.axes(projection=ccrs.PlateCarree(central_longitude=190))
fill = iplt.contourf(eof1[0], clevs, cmap=plt.cm.RdBu_r)
ax.add_feature(cartopy.feature.LAND, facecolor='w', edgecolor='k')
cb = plt.colorbar(fill, orientation='horizontal')
cb.set_label('correlation coefficient', fontsize=12)
ax.set_title('EOF1 expressed as correlation', fontsize=16)
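When run as a standalone script, the figure still needs to be displayed; the upstream eofs example closes with a show call, which presumably belongs here as well.

plt.show()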
Code example #8
File: hgt_example.py  Project: zhe233/eofs
import warnings

import cartopy.crs as ccrs
import iris
import iris.plot as iplt
import matplotlib.pyplot as plt
import numpy as np

from eofs.iris import Eof
from eofs.examples import example_data_path


# Read geopotential height data using the iris module. The file contains
# December-February averages of geopotential height at 500 hPa for the
# European/Atlantic domain (80W-40E, 20-90N).
filename = example_data_path('hgt_djf.nc')
z_djf = iris.load_cube(filename)

# Compute anomalies by removing the time-mean.
with warnings.catch_warnings():
    # Iris emits a warning due to the non-contiguous time dimension.
    warnings.simplefilter('ignore', UserWarning)
    z_djf_mean = z_djf.collapsed('time', iris.analysis.MEAN)
z_djf = z_djf - z_djf_mean

# Create an EOF solver to do the EOF analysis. Square-root of cosine of
# latitude weights are applied before the computation of EOFs.
solver = Eof(z_djf, weights='coslat')

# Retrieve the leading EOF, expressed as the covariance between the leading PC
# time series and the input geopotential height anomalies at each grid point.
eof1 = solver.eofsAsCovariance(neofs=1)

# Plot the leading EOF expressed as covariance in the European/Atlantic domain.
clevs = np.linspace(-75, 75, 11)
proj = ccrs.Orthographic(central_longitude=-20, central_latitude=60)
ax = plt.axes(projection=proj)
ax.coastlines()
ax.set_global()
iplt.contourf(eof1[0, 0], levels=clevs, cmap=plt.cm.RdBu_r)
ax.set_title('EOF1 expressed as covariance', fontsize=16)
plt.show()
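As an optional extension (not in the original script), the solver also reports how much variance the leading mode explains and can reconstruct the field from it.

# Fraction of total variance captured by EOF1, and the height field
# reconstructed from that single mode.
variance_fraction = solver.varianceFraction(neigs=1)
z_djf_rec = solver.reconstructedField(1)
print(variance_fraction.data, z_djf_rec.shape)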
Code example #9
File: solutions8abc.py  Project: crymt0522/Course
zannc = gph.aggregated_by(['clim_month'], iris.analysis.MEAN)
zrept = gph.copy()
# Tile the 12-month climatology across all 58 years of the record.
for yy in range(58):
    zrept.data[yy*12:yy*12+12, :, :] = zannc.data

# calculate anomalies from mean annual cycle -- also for 8.1(c)
zanom = iris.analysis.maths.subtract(gph,zrept)

# extract 1979-2000 anomalies
iris.coord_categorisation.add_year(zanom, 'time', name='year')
zbase = zanom.extract(iris.Constraint(coord_values={'year':sclm}))

#====================================================================
# 8.1(b)

# calculate EOFs
slvr = Eof(zbase, weights='coslat')
eof1 = slvr.eofsAsCovariance(neofs=1)
pc1  = iris.util.squeeze(slvr.pcs(npcs=1))
vfrc = slvr.varianceFraction(neigs=5)

# get standard deviation of pc1
stdv = pc1.collapsed('time', iris.analysis.STD_DEV)
npc1 = iris.analysis.maths.divide(pc1, stdv)

# plot
clev = np.linspace(-50,50,11)
fg = plt.figure(figsize=(8,8))
ax = plt.axes(projection=ccrs.Orthographic(central_longitude=180, central_latitude=90))
cs = iplt.contourf(eof1[0,0], levels=clev, cmap=plt.cm.RdBu_r)
ax.coastlines()
ax.set_global()
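The plotting block in this excerpt stops before adding a colourbar or showing the figure; a hypothetical completion (the colourbar label and units are assumptions):

cb = plt.colorbar(cs, orientation='horizontal')
cb.set_label('geopotential height covariance (m)')  # assumed label/units
plt.show()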
Code example #10
File: EOFs.py  Project: ellgil82/Hindcast
def calc_eofs(input, neofs):
    # rmv_mn (defined elsewhere in the project) returns the mean and the anomalies.
    mn, anom = rmv_mn(input)
    solver = Eof(anom, weights='coslat')
    eofs = solver.eofs(neofs=neofs)
    pcs = solver.pcs(npcs=neofs)
    return solver, eofs, pcs
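rmv_mn is defined elsewhere in the project; from the way its return values are unpacked it appears to return the time mean and the anomalies. A minimal sketch under that assumption, with a hypothetical usage line:

import iris


def rmv_mn(cube):
    """Hypothetical stand-in: return the time mean and the anomalies of a cube."""
    mean = cube.collapsed('time', iris.analysis.MEAN)
    anom = cube - mean
    return mean, anom


# Assumed usage; 'hindcast.nc' is a placeholder file name.
# cube = iris.load_cube('hindcast.nc')
# solver, eofs, pcs = calc_eofs(cube, neofs=3)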