from typing import Dict

import numpy as np
import xarray as xa
from eofs.xarray import Eof


# Method of an operation-handler class (the class definition is not shown).
def operate(self, vid: str, variable: xa.DataArray) -> Dict[str, xa.DataArray]:
    """ Convenience method defined for this particular operation """
    opSpecs = self.request['operation']
    result_arrays: Dict[str, xa.DataArray] = {}
    for opSpec in opSpecs:
        opId = opSpec['name'].split(':')[1]
        opAxis = opSpec['axis']  # requested axis (not used by this solver)
        # Square-root-of-cos(latitude) weights, applied before computing EOFs.
        coslat = np.cos(np.deg2rad(variable.coords['latitude'].values))
        wgts = np.sqrt(coslat)[..., np.newaxis]
        solver = Eof(variable, weights=wgts)
        # Store each result under a key built from the variable id and the
        # operation, so the annotated Dict return type actually holds.
        if opId == "correlation":
            result_arrays[f"{vid}:{opId}"] = solver.eofsAsCorrelation(neofs=1)
        elif opId == "covariance":
            result_arrays[f"{vid}:{opId}"] = solver.eofsAsCovariance(neofs=1)
        else:
            raise Exception(f"Unknown operation for EOF: '{opId}'")
    return result_arrays
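# A hypothetical request spec that operate() could consume (names and
# structure are illustrative assumptions, not part of the original code):
# only the part of 'name' after ':' and the 'axis' entry are read.
example_request = {
    'operation': [
        {'name': 'xarray.eof:correlation', 'axis': 'time'},
    ]
}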
import cartopy.crs as ccrs
import cartopy.feature as cfeature
import matplotlib.pyplot as plt
import numpy as np
import xarray as xr
from eofs.xarray import Eof
from eofs.examples import example_data_path

# Read SST anomalies using the xarray module. The file contains November-March
# averages of SST anomaly in the central and northern Pacific.
filename = example_data_path('sst_ndjfm_anom.nc')
sst = xr.open_dataset(filename)['sst']

# Create an EOF solver to do the EOF analysis. Square-root of cosine of
# latitude weights are applied before the computation of EOFs.
coslat = np.cos(np.deg2rad(sst.coords['latitude'].values))
wgts = np.sqrt(coslat)[..., np.newaxis]
solver = Eof(sst, weights=wgts)

# Retrieve the leading EOF, expressed as the correlation between the leading
# PC time series and the input SST anomalies at each grid point, and the
# leading PC time series itself.
eof1 = solver.eofsAsCorrelation(neofs=1)
pc1 = solver.pcs(npcs=1, pcscaling=1)

# Plot the leading EOF expressed as correlation in the Pacific domain.
clevs = np.linspace(-1, 1, 11)
ax = plt.axes(projection=ccrs.PlateCarree(central_longitude=190))
fill = eof1[0].plot.contourf(ax=ax, levels=clevs, cmap=plt.cm.RdBu_r,
                             add_colorbar=False, transform=ccrs.PlateCarree())
ax.add_feature(cfeature.LAND, facecolor='w', edgecolor='k')
cb = plt.colorbar(fill, orientation='horizontal')
cb.set_label('correlation coefficient', fontsize=12)
ax.set_title('EOF1 expressed as correlation', fontsize=16)

# Plot the leading PC time series.
plt.figure()
pc1[:, 0].plot(color='b', linewidth=2)
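# A quick follow-up check (an addition, not part of the original example):
# with pcscaling=1 the retrieved PCs are scaled to unit variance, which can
# be verified directly.
print(float(pc1[:, 0].std(ddof=1)))  # should be close to 1.0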
)['psl']  # units: Pa
psl = psl.sel(time=slice(start, end))
psl_obs = xr.open_dataset(
    'processed_data/remap-woa09_psl_Amon_ERA-Int.nc')['psl']  # units: Pa
psl_obs = psl_obs.sel(time=slice(start, end))

# In[246]:

psl_sof20s = psl.sel(lat=slice(-90, -20))
psl_sof20s = psl_sof20s - psl_sof20s.mean(dim='time')
coslat = np.cos(np.deg2rad(psl_sof20s.coords['lat'].values)).clip(0., 1.)
wgts = np.sqrt(coslat)[..., np.newaxis]
#psl_sof20s
solver = Eof(psl_sof20s, weights=wgts)
sh_eof = solver.eofsAsCorrelation(neofs=1)
var_s = solver.varianceFraction(neigs=1)

psl_sof20s_obs = psl_obs.sel(lat=slice(-90, -20))
psl_sof20s_obs = psl_sof20s_obs - psl_sof20s_obs.mean(dim='time')
#psl_sof20s
solver_obs = Eof(psl_sof20s_obs, weights=wgts)
sh_eof_obs = solver_obs.eofsAsCorrelation(neofs=1)
var_s_obs = solver_obs.varianceFraction(neigs=1)

# In[247]:

import iris
import iris.coord_categorisation

cube = iris.load_cube(
import matplotlib.pyplot as plt
import matplotlib.ticker as mticker
import numpy as np
import xarray as xr
import cartopy.crs as ccrs
from scipy import signal
from eofs.xarray import Eof

# Note: deseason_month, map_stuff, anomaly and the output directory
# `pathplots` are defined elsewhere in the module this function comes from.


def eof_orca_latlon_box(run, var, modes, lon_bnds, lat_bnds, pathfile, plot,
                        time, eoftype):
    if var == 'temp':
        key = 'votemper'
        key1 = 'votemper'
    elif var == 'sal':
        key = 'vosaline'
        key1 = 'vosaline'
    elif var == 'MLD':
        key = 'somxl010'
        key1 = 'somxl010'

    # read data
    ds = xr.open_dataset(pathfile)
    #ds["time_counter"] = ds['time_counter']+(np.datetime64('0002-01-01')-np.datetime64('0001-01-01'))
    if time == 'comparison':
        ds = ds.sel(time_counter=slice('1958-01-01', '2006-12-31'))

    # cut box for EOF at surface
    if var == 'MLD':
        data = ds[key].sel(lon=slice(lon_bnds[0], lon_bnds[1]),
                           lat=slice(lat_bnds[0], lat_bnds[1]))
        #data = cut_latlon_box(ds[key][:,:,:],ds.lon,ds.lat,
        #                      lon_bnds,lat_bnds)
    else:
        data = ds[key][:, 0, :, :].sel(lon=slice(lon_bnds[0], lon_bnds[1]),
                                       lat=slice(lat_bnds[0], lat_bnds[1]))
        #data = cut_latlon_box(ds[key][:,0,:,:],ds.lon,ds.lat,
        #                      lon_bnds,lat_bnds)
    data = data.to_dataset()

    # detrend data
    data[key1] = (['time_counter', 'lat', 'lon'],
                  signal.detrend(data[key].fillna(0), axis=0, type='linear'))
    #data=data.where(data!=0)

    # remove seasonal cycle and drop unnecessary coordinates
    if 'time_centered' in list(data.coords):
        data = deseason_month(data).drop('month').drop(
            'time_centered')  # somehow pca doesn't work otherwise
    else:
        data = deseason_month(data).drop(
            'month')  # somehow pca doesn't work otherwise

    # set 0 values back to nan
    data = data.where(data != 0)

    # EOF analysis: square-root of cosine of latitude weights are applied
    # before the computation of EOFs.
    coslat = np.cos(np.deg2rad(data['lat'].values))
    coslat, _ = np.meshgrid(coslat, np.arange(0, len(data['lon'])))
    wgts = np.sqrt(coslat)
    solver = Eof(data[key], weights=wgts.transpose())
    pcs = solver.pcs(npcs=modes, pcscaling=1)
    if eoftype == 'correlation':
        eof = solver.eofsAsCorrelation(neofs=modes)
    elif eoftype == 'covariance':
        eof = solver.eofsAsCovariance(neofs=modes)
    else:
        eof = solver.eofs(neofs=modes)
    varfr = solver.varianceFraction(neigs=4)
    print(varfr)

    # ----------- Plotting --------------------
    plt.close("all")
    if plot == 1:
        for i in np.arange(0, modes):
            fig = plt.figure(figsize=(8, 2))
            ax1 = fig.add_axes([0.1, 0.1, 0.3, 0.9],
                               projection=ccrs.PlateCarree())  # main axes
            ax1.set_extent(
                (lon_bnds[0], lon_bnds[1], lat_bnds[0], lat_bnds[1]))
            # discrete colormap ('inferno' is similar to cmocean 'thermal')
            cmap = plt.get_cmap('RdYlBu', len(np.arange(10, 30)) - 1)
            eof[i, :, :].plot(ax=ax1, cbar_kwargs={'label': 'Correlation'},
                              transform=ccrs.PlateCarree(), x='lon', y='lat',
                              add_colorbar=True, cmap=cmap)
            gl = map_stuff(ax1)
            gl.xlocator = mticker.FixedLocator([100, 110, 120])
            gl.ylocator = mticker.FixedLocator(np.arange(-35, -10, 5))
            plt.text(116, -24, str(np.round(varfr[i].values, decimals=2)),
                     horizontalalignment='center',
                     verticalalignment='center',
                     transform=ccrs.PlateCarree(), fontsize=8)

            ax2 = fig.add_axes([0.5, 0.1, 0.55, 0.9])  # main axes
            plt.plot(pcs.time_counter, pcs[:, i].values, linewidth=0.1,
                     color='k')
            anomaly(ax2, pcs.time_counter.values, pcs.values[:, i], [0, 0])
            ax2.set_xlim(
                [pcs.time_counter[0].values, pcs.time_counter[-1].values])

            plt.savefig(pathplots + 'eof_as' + eoftype + '_mode' + str(i) +
                        '_' + time + '_' + run + '_' + var + '.png',
                        dpi=300, bbox_inches='tight', pad_inches=0.1)
            plt.show()
    # ----------------------------------------------

    return pcs, eof, varfr
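# A hypothetical call (run label, path, and box bounds are placeholders):
# leading two correlation-scaled EOFs of detrended, deseasonalized surface
# temperature in a lat/lon box, without plotting.
pcs, eof, varfr = eof_orca_latlon_box(
    run='ctrl', var='temp', modes=2,
    lon_bnds=(100, 125), lat_bnds=(-35, -10),
    pathfile='votemper_monthly_orca.nc', plot=0,
    time='comparison', eoftype='correlation')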
import numpy as np
import xarray as xr
import proplot as plot  # ProPlot (inferred from the plot.subplots/ax.format usage)
from eofs.xarray import Eof

# --- read netcdf file
dset = xr.open_dataset('asstdt_pacific.nc')

# --- select djf months
sst = dset['sst'].sel(time=np.in1d(dset['time.month'], [1, 2, 12]))

# --- square-root of cosine of latitude weights
coslat = np.cos(np.deg2rad(sst.coords['lat'].values))
wgts = np.sqrt(coslat)[..., np.newaxis]

# --- eof solver
solver = Eof(sst, weights=wgts)

# --- eof results
eofs = solver.eofsAsCorrelation(neofs=2)
pcs = solver.pcs(npcs=2, pcscaling=1)
variance_fractions = solver.varianceFraction()
north_test = solver.northTest(vfscaled=True)

# --- spatial patterns
fig, ax = plot.subplots(axwidth=5, nrows=2, tight=True,
                        proj='pcarree', proj_kw={'lon_0': 180})

# --- format options
ax.format(land=False, coast=True, innerborders=True, borders=True,
          large='15px', labels=False, latlim=(31, -31), lonlim=(119, 291),
          geogridlinewidth=0, abcloc='ul')

# a) first EOF mode
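# A minimal follow-up sketch (an addition, not part of the original snippet):
# applying the North et al. (1982) rule of thumb. A mode is considered well
# separated from the next when the gap between their variance fractions
# exceeds the typical sampling error from northTest(vfscaled=True).
for mode in range(2):
    gap = float(variance_fractions[mode] - variance_fractions[mode + 1])
    status = 'separated' if gap > float(north_test[mode]) else 'degenerate'
    print(f'mode {mode + 1}: {float(variance_fractions[mode]):.3f} '
          f'+/- {float(north_test[mode]):.3f} ({status})')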
import sys

import numpy as np
import pandas as pd
import xarray as xr
from eofs.xarray import Eof


def main():
    ens = sys.argv[1]
    sYear = sys.argv[2]
    eYear = sys.argv[3]
    if int(sYear) < 1920:
        raise ValueError("Starting year must be 1920 or later.")
    if int(eYear) > 2100:
        raise ValueError("End year must be 2100 or earlier.")
    print("Computing NPGO for ensemble number " + ens + "...")
    filepath = ('/glade/scratch/rbrady/EBUS_BGC_Variability/' +
                'global_residuals/SST/remapped/remapped.SST.' + ens +
                '.192001-210012.nc')
    ds = xr.open_dataset(filepath)
    print("Global residuals loaded...")
    ds = ds['SST'].squeeze()
    # Make time dimension readable through xarray.
    ds['time'] = pd.date_range('1920-01', '2101-01', freq='M')
    # Reduce to time period of interest.
    ds = ds.sel(time=slice(sYear + '-01', eYear + '-12'))
    # Slice down to Northeast Pacific domain.
    ds = ds.sel(lat=slice(25, 62), lon=slice(180, 250))
    # Take annual JFM means.
    month = ds['time.month']
    JFM = (month <= 3)
    ds_winter = ds.where(JFM).resample(time='A').mean('time')
    # Compute EOF; anomalies are precomputed, so the solver is not centered.
    coslat = np.cos(np.deg2rad(ds_winter.lat.values))
    wgts = np.sqrt(coslat)[..., np.newaxis]
    solver = Eof(ds_winter, weights=wgts, center=False)
    eof = solver.eofsAsCorrelation(neofs=2)
    variance = solver.varianceFraction(neigs=2)
    print("NPGO computed.")
    # Reconstruct the monthly index of SSTa by projecting
    # these values onto the annual PC timeseries.
    pseudo_pc = solver.projectField(ds, neofs=2, eofscaling=1)
    # Set up as dataset.
    ds = eof.to_dataset()
    ds['pc'] = pseudo_pc
    ds['variance_fraction'] = variance
    ds = ds.rename({'eofs': 'eof'})
    ds = ds.sel(mode=1)
    # Invert to the proper sign convention for the bullseye.
    if ds.sel(lat=45.5, lon=210).eof >= 0:
        ds['eof'] = ds['eof'] * -1
        ds['pc'] = ds['pc'] * -1
    # Change some attributes for the variables.
    ds['eof'].attrs['long_name'] = 'Correlation between PC and JFM SSTa'
    ds['pc'].attrs['long_name'] = 'Principal component for NPGO'
    # Add a description of methods for clarity.
    ds.attrs['description'] = ('Second mode of JFM SSTa variability over '
                               '25-62N and 180-110W.')
    ds.attrs['anomalies'] = ('Anomalies were computed by removing the '
                             'ensemble mean at each grid cell.')
    ds.attrs['weighting'] = (
        'The native grid was regridded to a standard 1deg x 1deg (180x360) '
        'grid. Weighting was computed via the sqrt of the cosine of '
        'latitude.')
    print("Saving to netCDF...")
    ds.to_netcdf('/glade/p/work/rbrady/NPGO/NPGO.' + ens + '.' +
                 str(sYear) + '-' + str(eYear) + '.nc')
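# Hypothetical entry point (script name is a placeholder), e.g.:
#   python compute_npgo.py 001 1920 2015
if __name__ == '__main__':
    main()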