def soilmoisture_fingerprints(mask, name=None):
    """Plot soil-moisture "fingerprints" (leading EOF of the model mean) at
    three depths, plus their principal components.

    Panels (a)-(c): leading EOF of the multi-model-average soil moisture for
    each depth ("30cm", "2m", "pdsi"), sign-oriented via da.get_orientation.
    Panel (d): the corresponding PC time series.

    Parameters
    ----------
    mask : mask passed through to soilmoisture() to subset the data.
    name : optional region name; if given, plot_regional is used instead of
        the global landplot (and no colorbar is drawn, matching the original
        behavior).
    """
    depths = ["30cm", "2m", "pdsi"]
    letters = ["(a): ", "(b): ", "(c): "]
    pcs = []
    # enumerate avoids the O(n) depths.index() lookup each iteration
    for i, depth in enumerate(depths):
        plt.subplot(2, 2, i + 1)
        sm = soilmoisture(depth, mask=mask)
        # EOF of the multi-model average (average over axis 0 = model axis)
        solver = Eof(MV.average(sm, axis=0))
        # Orient the (arbitrary) EOF sign consistently
        fac = da.get_orientation(solver)
        if name is None:
            m = landplot(fac * solver.eofs()[0], vmin=-.1, vmax=.1)
            plt.colorbar(orientation='horizontal', label='EOF loading')
        else:
            m = plot_regional(fac * solver.eofs()[0], name, vmin=-.1, vmax=.1)
        m.drawcountries()
        m.drawcoastlines(color='gray')
        plt.title(letters[i] + depth + " fingerprint")
        pcs.append(fac * solver.pcs()[:, 0])
    # Panel (d): PC time series for all three depths
    plt.subplot(2, 2, 4)
    for pc, depth in zip(pcs, depths):
        time_plot(pc, label=depth)
    plt.legend(loc=0)
    plt.title("(d): Principal Components")
    plt.xlabel("Time")
    plt.ylabel("Temporal amplitude")
def get_EOFs(var1, num=3, scaling=2):
    """Area-weighted EOF decomposition of a (time, lat, lon) variable.

    The time mean is removed before the decomposition; the horizontal axes
    of the input are re-attached to the anomaly field so the solver sees a
    proper cdms variable.

    Parameters
    ----------
    var1 : cdms2 variable with axes (time, lat, lon).
    num : number of modes to return.
    scaling : eofscaling/pcscaling option forwarded to the solver.

    Returns
    -------
    (eofs, pcs, variance_fraction, eigenvalues) for the first `num` modes.
    """
    # Keep the spatial axes so they can be restored on the anomaly array.
    lat_axis = var1.getAxis(1)
    lon_axis = var1.getAxis(2)
    # Subtract the time mean to form anomalies.
    anom = MV.array(var1 - np.mean(var1, axis=0))
    anom.setAxis(1, lat_axis)
    anom.setAxis(2, lon_axis)
    solver = Eof(anom, weights='area')
    return (solver.eofs(neofs=num, eofscaling=scaling),
            solver.pcs(npcs=num, pcscaling=scaling),
            solver.varianceFraction(num),
            solver.eigenvalues(num))
def soilmoisture_fingerprints(mask, name=None, fortalk=False):
    """Plot soil-moisture/PDSI fingerprints and PCs; return the EOF solvers.

    Panels (a)-(c) (or separate figures when fortalk=True): leading EOF of
    the multi-model-average field for "pdsi", "30cm" and "2m", area-weighted
    and sign-oriented via da.get_orientation. Final panel: PC time series.

    Parameters
    ----------
    mask : mask passed through to soilmoisture().
    name : optional region name; if given, b.plot_regional is used instead
        of the global b.landplot (and no colorbar is drawn).
    fortalk : if True, draw each panel in its own figure with no letter
        prefixes in the titles (presentation mode).

    Returns
    -------
    dict mapping depth -> Eof solver.
    """
    Fingerprints = {}
    depths = ["pdsi", "30cm", "2m"]
    if fortalk:
        letters = ["", "", ""]
    else:
        letters = ["(a): ", "(b): ", "(c): "]
    pcs = []
    # enumerate replaces the per-iteration depths.index() lookup
    for i, depth in enumerate(depths):
        if fortalk:
            plt.figure()
        else:
            plt.subplot(2, 2, i + 1)
        sm = soilmoisture(depth, mask=mask)
        solver = Eof(MV.average(sm, axis=0), weights='area')
        Fingerprints[depth] = solver
        # Orient the (arbitrary) EOF sign consistently
        fac = da.get_orientation(solver)
        if name is None:
            m = b.landplot(fac * solver.eofs()[0], vmin=-.1, vmax=.1)
            plt.colorbar(orientation='horizontal', label='EOF loading')
        else:
            m = b.plot_regional(fac * solver.eofs()[0], name, vmin=-.1, vmax=.1)
        m.drawcountries()
        m.drawcoastlines(color='gray')
        # BUG FIX: was `depth is not "pdsi"` — identity comparison on a string
        # literal; use equality instead.
        if depth != "pdsi":
            plt.title(letters[i] + depth + " fingerprint")
        else:
            plt.title(letters[i] + " PDSI fingerprint")
        pcs += [fac * solver.pcs()[:, 0]]
    if fortalk:
        plt.figure()
    else:
        plt.subplot(2, 2, 4)
    for i in range(3):
        if depths[i] == "pdsi":
            label = "PDSI"
        else:
            label = depths[i]
        time_plot(pcs[i], label=label, lw=3, color=cm.copper(i / 2.))
    plt.legend(loc=0)
    plt.title("(d): Principal Components")
    plt.xlabel("Time")
    plt.ylabel("Temporal amplitude")
    plt.xlim(1900, 2100)
    return Fingerprints
# Set time period --- start_year = 1980 end_year = 2000 start_time = cdtime.comptime(start_year) end_time = cdtime.comptime(end_year) # Load variable --- d = f('sst',time=(start_time,end_time),longitude=(0,360),latitude=(-90,90)) # Provide proper variable name # Reomove annual cycle --- d_anom = cdutil.ANNUALCYCLE.departures(d) # EOF (take only first variance mode...) --- solver = Eof(d_anom, weights='area') eof = solver.eofsAsCovariance(neofs=1) pc = solver.pcs(npcs=1, pcscaling=1) # pcscaling=1: scaled to unit variance # (divided by the square-root of their eigenvalue) frac = solver.varianceFraction() # Sign control if needed --- eof = eof * -1 pc = pc * -1 #=========================================================================================================== # Plot #----------------------------------------------------------------------------------------------------------- # Create canvas --- canvas = vcs.init(geometry=(900,800)) canvas.open() template = canvas.createtemplate()
class DroughtAtlas():
    """Drought-atlas detection/attribution helper.

    Loads a tree-ring PDSI reconstruction ("drought atlas") and the matching
    CMIP5 hist+rcp85 model ensemble, builds an area-weighted EOF fingerprint
    from the multi-model average, and projects observations, models, and
    various instrumental/piControl datasets onto it to form signal-to-noise
    statistics. Written for Python 2 (bare `print` statements).
    """

    def __init__(self, name, cutoff='0001-1-1'):
        # Strip a trailing "2.5" (resolution tag) from the display name if present.
        if name.find("2.5") >= 0:
            self.name = name.split("2.5")[0]
        else:
            self.name = name
        #if name.find("+")<0:
        # --- Observations (tree-ring PDSI reconstruction) ---
        f = cdms.open("../DROUGHT_ATLAS/PROCESSED/" + name + ".nc")
        obs = f("pdsi")
        self.obs = MV.masked_where(np.isnan(obs), obs)
        # Values with |pdsi| > 90 are treated as missing-data flags.
        self.obs = MV.masked_where(np.abs(self.obs) > 90, self.obs)
        self.obs = self.obs(time=(cutoff, '2020-12-31'))
        self.obs = mask_data(
            self.obs, self.obs.mask[0]
        )  #Make all the obs have the same mask as the first datapoint
        f.close()
        # --- Models (CMIP5 historical + RCP8.5 PDSI) ---
        fm = cdms.open("../DROUGHT_ATLAS/CMIP5/pdsi." + name + ".hist.rcp85.nc")
        self.model = get_rid_of_bad(fm("pdsi"))
        self.model = MV.masked_where(np.isnan(self.model), self.model)
        fm.close()
        # else: #DEPRECATED: MERGE observations onto common grid using old code
        #     name1,name2=name.split("+")
        #     f1 = cdms.open("../DROUGHT_ATLAS/PROCESSED/"+name1+".nc")
        #     obs1 = f1("pdsi")
        #     obs1 = MV.masked_where(np.isnan(obs1),obs1)
        #     obs1 = MV.masked_where(np.abs(obs1)>90,obs1)
        #     obs1 = obs1(time=(cutoff,'2017-12-31'))
        #     obs1=mask_data(obs1,obs1.mask[0])
        #     f1.close()
        #     fm1 = cdms.open("../DROUGHT_ATLAS/CMIP5/pdsi."+name1+".hist.rcp85.nc")
        #     model1=get_rid_of_bad(fm1("pdsi"))
        #     model1=MV.masked_where(np.isnan(model1),model1)
        #     fm1.close()
        #     f2 = cdms.open("../DROUGHT_ATLAS/PROCESSED/"+name2+".nc")
        #     obs2 = f2("pdsi")
        #     obs2 = MV.masked_where(np.isnan(obs2),obs2)
        #     obs2 = MV.masked_where(np.abs(obs2)>90,obs2)
        #     obs2 = obs2(time=(cutoff,'2017-12-12'))
        #     obs2=mask_data(obs2,obs2.mask[0])
        #     f2.close()
        #     fm2 = cdms.open("../DROUGHT_ATLAS/CMIP5/pdsi."+name2+".hist.rcp85.nc")
        #     model2=get_rid_of_bad(fm2("pdsi"))
        #     model2=MV.masked_where(np.isnan(model2),model2)
        #     fm2.close()
        #     self.obs=merge.merge(obs1,obs2)
        #     self.model=merge.merge(model1,model2)
        # --- Fingerprint: EOF of the multi-model average, on the obs mask ---
        mma = MV.average(self.model, axis=0)
        self.mma = mask_data(
            mma, self.obs[0].mask)  #make all the models have the same mask
        self.solver = Eof(self.mma, weights='area')
        self.eofmask = self.solver.eofs()[0].mask
        # fac flips the arbitrary EOF sign to a consistent orientation.
        self.fac = da.get_orientation(self.solver)
        # Obs projected onto the fingerprint (leading mode only).
        self.projection = self.solver.projectField(
            mask_data(self.obs, self.eofmask))[:, 0] * self.fac
        # Pre-industrial (pre-1850) part of the projection serves as noise.
        self.noise = self.projection(time=('1-1-1', '1850-1-1'))
        # Per-model projections onto the fingerprint.
        self.P = self.model_projections()

    def get_noise(self, solver=None):
        """Pre-1850 obs projection: cached for the default solver, recomputed otherwise."""
        if solver is None:
            return self.noise
        else:
            proj = solver.projectField(mask_data(
                self.obs, self.eofmask))[:, 0] * da.get_orientation(solver)
            noise = proj(time=('1-1-1', '1850-1-1'))
            return noise

    def get_forced(self, solver=None):
        """Model projections (the forced signal): cached or recomputed for `solver`."""
        if solver is None:
            return self.P
        else:
            return self.model_projections(solver=solver)

    def get_tree_ring_projection(self, solver=None):
        """Full-record obs projection: cached or recomputed for `solver`."""
        if solver is None:
            return self.projection
        else:
            fac = da.get_orientation(solver)
            projection = solver.projectField(mask_data(
                self.obs, self.eofmask))[:, 0] * fac
            return projection

    def plot_fingerprint(self, ax1=None, ax2=None):
        """Plot the (sign-oriented) leading EOF map and its PC time series.

        NOTE(review): `ax2` is accepted but never used; the PC panel is
        always drawn into subplot 122.
        """
        eof1 = self.solver.eofs()[0] * self.fac
        #v=max([np.abs(np.ma.min(eof1)),np.abs(np.ma.max(eof1))])
        pc1 = self.solver.pcs()[:, 0] * self.fac
        if ax1 is None:
            ax1 = plt.subplot(121)
        # Regional atlases get the regional map; anything else is plotted globally.
        if self.name not in ["OWDA", "MXDA", "NADA", "MADA", "ANZDA"]:
            #m=bmap(eof1,cmap=cm.BrBG,vmin=-v,vmax=v)
            m = landplot(eof1)
            m.drawcoastlines()
            plt.colorbar(orientation="horizontal", label="EOF loading")
        else:
            m = plot_regional(self.solver.eofs()[0], self.name, cmap=cm.BrBG)
            m.drawcoastlines()
        plt.subplot(122)
        time_plot(pc1)

    def model_projections(self, solver=None):
        """Project each ensemble member onto the fingerprint.

        With no `solver`, a fresh solver is fit per member (on the mma
        restricted to that member's mask) so each projection uses exactly
        the member's valid points. With an explicit `solver`, members are
        zero-filled and projected onto that single solver's EOF mask.

        Returns a (model, time) cdms array of leading-mode projections.
        """
        if solver is None:
            make_own_solver = True
        else:
            make_own_solver = False
        if solver is None:
            to_proj = mask_data(self.model, self.solver.eofs()[0].mask)
        else:
            # Fill missing data with 0 so projectField sees a complete field.
            to_proj = cmip5.cdms_clone(MV.filled(self.model, fill_value=0),
                                       self.model)
            to_proj = mask_data(to_proj, solver.eofs()[0].mask)
        P = MV.zeros(to_proj.shape[:2])
        for i in range(to_proj.shape[0]):
            tp = to_proj[i]
            if make_own_solver:
                mma_mask = mask_data(self.mma, tp[0].mask)
                solver = Eof(mma_mask, weights='area')
            fac = da.get_orientation(solver)
            P[i] = solver.projectField(tp)[:, 0] * fac
        P.setAxisList(to_proj.getAxisList()[:2])
        return P
        #self.P=P

    def sn_at_time(self, start_time, L, overlapping=True, noisestart=None,
                   solver=None):
        """Model trends and noise trend distribution for an L-year window
        starting at `start_time`.

        Returns (modslopes, noiseterm): per-model linear trends over the
        window, and bootstrapped (or non-overlapping) L-year noise slopes.
        """
        if noisestart is None:
            noisestart = cmip5.start_time(self.obs)
        noisestop = cmip5.stop_time(self.get_noise(solver=solver))
        stop_time = start_time.add(L, cdtime.Years)
        modslopes = cmip5.get_linear_trends(
            self.get_forced(solver=solver)(time=(start_time, stop_time)))
        if overlapping:
            noiseterm = bootstrap_slopes(
                self.get_noise(solver=solver)(time=(noisestart, noisestop)), L)
        else:
            # NOTE(review): /365. presumably converts per-day to per-year
            # trends from da.get_slopes — confirm against da's units.
            noiseterm = da.get_slopes(
                self.get_noise(solver=solver)(time=(noisestart, noisestop)),
                L) / 365.
        return modslopes, noiseterm

    def obs_SN(self, start_time, stop_time=None, overlapping=True,
               include_trees=True, include_dai=False, include_cru=False,
               include_piControl=False, noisestart=None, solver=None,
               plot=True):
        """Signal-to-noise summary for the observed trend vs model/noise trends.

        Returns a dict keyed by dataset ("trees", "dai", "cru") whose values
        are [signal/noise-std, noise percentile, h85 percentile].

        NOTE(review): the call below hard-codes overlapping=True, so the
        `overlapping` parameter is accepted but never honored — confirm intent.
        """
        to_return = {}
        if stop_time is None:
            stop_time = cmip5.stop_time(self.get_tree_ring_projection())
        target_obs = self.get_tree_ring_projection(solver=solver)(
            time=(start_time, stop_time))
        L = len(target_obs)
        modslopes, noiseterm = self.sn_at_time(start_time, L,
                                               overlapping=True,
                                               noisestart=noisestart,
                                               solver=solver)
        # Normalize all trends by the noise standard deviation.
        ns = np.std(noiseterm)
        signal = float(cmip5.get_linear_trends(target_obs))
        if plot:
            plt.hist(modslopes / ns, 20, normed=True,
                     color=get_dataset_color("h85"), alpha=.5)
            lab = str(start_time.year) + "-" + str(stop_time.year)
            da.fit_normals_to_data(modslopes / ns,
                                   color=get_dataset_color("h85"), lw=1,
                                   label="H85")
            plt.hist(noiseterm / ns, 20, normed=True,
                     color=get_dataset_color("tree_noise"), alpha=.5)
            da.fit_normals_to_data(noiseterm / ns,
                                   color=get_dataset_color("tree_noise"),
                                   lw=1, label="Pre-1850 tree rings")
        if include_trees:
            percentiles = []
            if plot:
                plt.axvline(signal / ns, color=get_dataset_color("tree"),
                            lw=1, label=lab + " GDA trend")
            print signal / ns
            # Where does the observed trend fall within each distribution?
            noise_percentile = stats.percentileofscore(noiseterm.tolist(),
                                                       signal)
            h85_percentile = stats.percentileofscore(modslopes.tolist(),
                                                     signal)
            percentiles += [noise_percentile, h85_percentile]
            to_return["trees"] = [signal / ns] + percentiles
        if include_dai:
            daipercentiles = []
            dai_proj = self.project_dai_on_solver(start=start_time,
                                                  solver=solver)
            daitrend = float(
                cmip5.get_linear_trends(dai_proj(time=(start_time,
                                                       stop_time))))
            daisignal = daitrend / ns
            noise_percentile = stats.percentileofscore(noiseterm.tolist(),
                                                       daitrend)
            h85_percentile = stats.percentileofscore(modslopes.tolist(),
                                                     daitrend)
            daipercentiles += [noise_percentile, h85_percentile]
            if plot:
                plt.axvline(daisignal, color=get_dataset_color("dai"), lw=1,
                            label="Dai")
                print "DAI signal/noise is " + str(daisignal)
            to_return["dai"] = [daitrend / ns] + daipercentiles
        if include_cru:
            crupercentiles = []
            cru_proj = self.project_cru_on_solver(start=start_time,
                                                  solver=solver)
            crutrend = float(
                cmip5.get_linear_trends(cru_proj(time=(start_time,
                                                       stop_time))))
            noise_percentile = stats.percentileofscore(noiseterm.tolist(),
                                                       crutrend)
            h85_percentile = stats.percentileofscore(modslopes.tolist(),
                                                     crutrend)
            crupercentiles += [noise_percentile, h85_percentile]
            crusignal = crutrend / ns
            if plot:
                plt.axvline(crusignal, color=get_dataset_color("cru"), lw=1,
                            label="CRU")
                print "CRU signal/noise is " + str(crusignal)
            to_return["cru"] = [crutrend / ns] + crupercentiles
        if include_piControl:
            # Model-internal-variability alternative to the tree-ring noise.
            p = self.project_piControl_on_solver(solver=solver)
            noiseterm_mod = bootstrap_slopes(p, L)
            if plot:
                plt.hist(noiseterm_mod / ns, 20, normed=True,
                         color=get_dataset_color("picontrol"), alpha=.5)
                da.fit_normals_to_data(noiseterm_mod / ns,
                                       color=get_dataset_color("picontrol"),
                                       lw=1, label="PiControl")
            print "relative to model noise:"
            print float(signal) / np.std(noiseterm_mod)
            # percentiles+=[stats.percentileofscore(noiseterm_mod.tolist(),signal)]
        if plot:
            plt.legend(loc=0)
            plt.xlabel("S/N")
            plt.ylabel("Normalized Frequency")
        return to_return

    def for_figure_4(self, start_time, stop_time=None, overlapping=True,
                     include_trees=True, include_dai=False, include_cru=False,
                     include_piControl=False, noisestart=None, solver=None):
        """Collect the raw ingredients of obs_SN (no plotting) into a dict.

        NOTE(review): as in obs_SN, overlapping=True is hard-coded in the
        sn_at_time call below — the `overlapping` argument is ignored.
        """
        data = {}
        if stop_time is None:
            stop_time = cmip5.stop_time(self.get_tree_ring_projection())
        target_obs = self.get_tree_ring_projection()(time=(start_time,
                                                           stop_time))
        L = len(target_obs)
        modslopes, noiseterm = self.sn_at_time(start_time, L,
                                               overlapping=True,
                                               noisestart=noisestart,
                                               solver=solver)
        ns = np.std(noiseterm)
        signal = float(cmip5.get_linear_trends(target_obs))
        data["noise"] = noiseterm
        data["modslopes"] = modslopes
        data["tree_rings"] = signal
        if include_dai:
            dai_proj = self.project_dai_on_solver(start=start_time,
                                                  solver=solver)
            daitrend = cmip5.get_linear_trends(
                dai_proj(time=(start_time, stop_time)))
            data["dai"] = daitrend
        if include_cru:
            cru_proj = self.project_cru_on_solver(start=start_time,
                                                  solver=solver)
            crutrend = cmip5.get_linear_trends(
                cru_proj(time=(start_time, stop_time)))
            data["cru"] = crutrend
        if include_piControl:
            p = self.project_piControl_on_solver(solver=solver)
            noiseterm_mod = bootstrap_slopes(p, L)
            data["picontrol"] = noiseterm_mod
        return data

    def time_of_emergence(self, start_time, times=np.arange(10, 76),
                          noisestart=None, plot=True, solver=None,
                          uncertainty="lines", **kwargs):
        """S/N as a function of trend length; stores the result in self.TOE.

        For each L in `times`, computes per-model trend S/N for the window
        [start_time, start_time+L]. Optionally plots the ensemble spread
        ("lines", "bounds", or filled band) plus the ensemble mean.

        NOTE(review): the sn_at_time call inside the loop does not forward
        `solver`, while the surrounding shape/axis calls do — confirm intent.
        """
        if noisestart is None:
            noisestart = cmip5.start_time(self.obs)
        if not hasattr(self, "P"):
            self.model_projections()
        nmod, nyears = self.get_forced(solver=solver).shape
        self.TOE = MV.zeros((nmod, len(times)))
        for i in range(len(times)):
            L = times[i]
            modslopes, noiseterm = self.sn_at_time(start_time, L,
                                                   noisestart=noisestart)
            sns = modslopes / np.std(noiseterm)
            self.TOE[:, i] = sns
        self.TOE.setAxis(0, self.get_forced(solver=solver).getAxis(0))
        if plot:
            endyears = start_time.year + times
            if uncertainty == "lines":
                # One faint line per ensemble member.
                for ind_model in self.TOE.asma():
                    plt.plot(endyears, ind_model, alpha=.3,
                             color=cm.Greys(.5), lw=1)
            elif uncertainty == "bounds":
                plt.plot(endyears, np.ma.min(self.TOE.asma(), axis=0),
                         linestyle="--", **kwargs)
                plt.plot(endyears, np.ma.max(self.TOE.asma(), axis=0),
                         linestyle="--", **kwargs)
            else:
                plt.fill_between(endyears,
                                 np.ma.min(self.TOE.asma(), axis=0),
                                 np.ma.max(self.TOE.asma(), axis=0),
                                 alpha=.2, **kwargs)
            plt.plot(endyears, np.ma.average(self.TOE.asma(), axis=0),
                     label=self.name + " model mean signal", **kwargs)
            #plt.fill_between(endyears,np.ma.min(self.TOE.asma(),axis=0),np.ma.max(self.TOE.asma(),axis=0),alpha=.3,**kwargs)
            #plt.axhline(stats.norm.interval(.9)[-1],c="r",lw=3)
            plt.xlabel("Trend end year")
            plt.ylabel("Signal-to-noise ratio")

    def project_dai_on_solver(self, start='1970-1-1', solver=None):
        """Project the Dai self-calibrated PDSI dataset onto the fingerprint.

        A grid cell must be valid at every time step to be used (newmask);
        if no solver is supplied one is fit on the mma restricted to those
        cells. Returns the sign-oriented leading-mode projection.
        """
        f = cdms.open("../DROUGHT_ATLAS/OBSERVATIONS/DAI_selfcalibrated.nc")
        dai_jja = f("pdsi")
        f.close()
        dai_jja_mask = mask_data(dai_jja,
                                 self.obs[0].mask)(time=(start, '2018-12-31'))
        # Cells valid at all times: product of the inverted mask over time.
        newmask = np.prod(~dai_jja_mask.mask, axis=0)
        dai_jja_mask = mask_data(dai_jja_mask, newmask == 0)
        if solver is None:
            solver = Eof(mask_data(self.mma, newmask == 0), weights='area')
        dai_jja_mask = mask_data(dai_jja_mask, solver.eofs()[0].mask)
        fac = da.get_orientation(solver)
        return solver.projectField(dai_jja_mask)[:, 0] * fac

    def project_cru_on_solver(self, start='1970-1-1', solver=None):
        """Project the CRU self-calibrated PDSI dataset onto the fingerprint.

        Same procedure as project_dai_on_solver, for the CRU dataset.
        """
        f = cdms.open("../DROUGHT_ATLAS/OBSERVATIONS/CRU_selfcalibrated.nc")
        cru_jja = f("pdsi")
        f.close()
        cru_jja_mask = mask_data(cru_jja,
                                 self.obs[0].mask)(time=(start, '2018-12-31'))
        newmask = np.prod(~cru_jja_mask.mask, axis=0)
        cru_jja_mask = mask_data(cru_jja_mask, newmask == 0)
        if solver is None:
            solver = Eof(mask_data(self.mma, newmask == 0), weights='area')
        cru_jja_mask = mask_data(cru_jja_mask, solver.eofs()[0].mask)
        fac = da.get_orientation(solver)
        return solver.projectField(cru_jja_mask)[:, 0] * fac

    def project_piControl_on_solver(self, solver=None):
        """Concatenate projections of all piControl summer-PDSI runs.

        Each file on the local PICTRL volume is masked to the fingerprint
        domain and projected (a per-file solver is fit when needed); the
        projections are concatenated and given a synthetic yearly time axis.

        NOTE(review): the first file handle is not closed before the loop
        (only subsequent ones are) — possible resource leak; confirm.
        """
        direc = "/Volumes/Marvel/PICTRL/PDSI_REGRIDDED_SUMMER/"
        files = glob.glob(direc + "*")
        npiC = len(files)
        fname = files[0]
        f = cdms.open(fname)
        piC_pdsi_regrid = f("pdsi_summer")
        piC_pdsi_regrid = MV.masked_where(np.isnan(piC_pdsi_regrid),
                                          piC_pdsi_regrid)
        mask = self.solver.eofs()[0].mask
        # grid=self.model.getGrid()
        nyears = piC_pdsi_regrid.shape[0]
        # tax=cdms.createAxis(np.arange(piC_pdsi.shape[0]))
        # tax.designateTime()
        # tax.units = 'years since 0000-7-1'
        # tax.id="time"
        # piC_pdsi.setAxis(0,tax)
        # piC_pdsi_regrid = piC_pdsi.regrid(grid,regridTool='regrid2')
        piC_mask = mask_data(piC_pdsi_regrid, mask)
        newmask = np.prod(~piC_mask.mask, axis=0)
        if solver is None:
            solver = Eof(mask_data(self.mma, newmask == 0), weights='area')
        fac = da.get_orientation(solver)
        p = solver.projectField(piC_mask)[:, 0] * fac
        # Python 2 idiom: range() returns a list, so [1:] skips file 0.
        for i in range(npiC)[1:]:
            fname = files[i]
            f = cdms.open(fname)
            piC_pdsi_regrid = f("pdsi_summer")
            piC_pdsi_regrid = MV.masked_where(np.isnan(piC_pdsi_regrid),
                                              piC_pdsi_regrid)
            # piC_pdsi = MV.masked_where(np.isnan(piC_pdsi),piC_pdsi)
            nyears += piC_pdsi_regrid.shape[0]
            # tax=cdms.createAxis(np.arange(piC_pdsi.shape[0]))
            # tax.designateTime()
            # tax.units = 'years since 0000-7-1'
            # tax.id="time"
            # piC_pdsi.setAxis(0,tax)
            # piC_pdsi_regrid = piC_pdsi.regrid(grid,regridTool='regrid2')
            piC_mask = mask_data(piC_pdsi_regrid, mask)
            newmask = np.prod(~piC_mask.mask, axis=0)
            solver = Eof(mask_data(self.mma, newmask == 0), weights='area')
            fac = da.get_orientation(solver)
            f.close()
            p = MV.concatenate((p, fac * solver.projectField(piC_mask)[:, 0]))
        # Synthetic yearly time axis spanning all concatenated runs.
        tax = cdms.createAxis(np.arange(nyears))
        tax.designateTime()
        tax.units = 'years since 0000-7-1'
        tax.id = "time"
        p.setAxis(0, tax)
        return p
# Read SST anomalies using the cdms2 module from UV-CDAT. The file contains # November-March averages of SST anomaly in the central and northern Pacific. filename = example_data_path('sst_ndjfm_anom.nc') ncin = cdms2.open(filename, 'r') sst = ncin('sst') ncin.close() # Create an EOF solver to do the EOF analysis. Square-root of cosine of # latitude weights are applied before the computation of EOFs. solver = Eof(sst, weights='coslat') # Retrieve the leading EOF, expressed as the correlation between the leading # PC time series and the input SST anomalies at each grid point, and the # leading PC time series itself. eof1 = solver.eofsAsCorrelation(neofs=1) pc1 = solver.pcs(npcs=1, pcscaling=1) # Plot the leading EOF expressed as correlation in the Pacific domain. lons, lats = eof1.getLongitude()[:], eof1.getLatitude()[:] clevs = np.linspace(-1, 1, 11) ax = plt.axes(projection=ccrs.PlateCarree(central_longitude=190)) fill = ax.contourf(lons, lats, eof1(squeeze=True), clevs, transform=ccrs.PlateCarree(), cmap=plt.cm.RdBu_r) ax.add_feature(cfeature.LAND, facecolor='w', edgecolor='k') cb = plt.colorbar(fill, orientation='horizontal') cb.set_label('correlation coefficient', fontsize=12) plt.title('EOF1 expressed as correlation', fontsize=16) # Plot the leading PC time series. plt.figure() years = range(1962, 2012)
# Read SST anomalies using the cdms2 module from UV-CDAT. The file contains # November-March averages of SST anomaly in the central and northern Pacific. filename = example_data_path('sst_ndjfm_anom.nc') ncin = cdms2.open(filename, 'r') sst = ncin('sst') ncin.close() # Create an EOF solver to do the EOF analysis. Square-root of cosine of # latitude weights are applied before the computation of EOFs. solver = Eof(sst, weights='coslat') # Retrieve the leading EOF, expressed as the correlation between the leading # PC time series and the input SST anomalies at each grid point, and the # leading PC time series itself. eof1 = solver.eofsAsCorrelation(neofs=1) pc1 = solver.pcs(npcs=1, pcscaling=1) # Plot the leading EOF expressed as correlation in the Pacific domain. lons, lats = eof1.getLongitude()[:], eof1.getLatitude()[:] clevs = np.linspace(-1, 1, 11) ax = plt.axes(projection=ccrs.PlateCarree(central_longitude=190)) fill = ax.contourf(lons, lats, eof1(squeeze=True).data, clevs, transform=ccrs.PlateCarree(), cmap=plt.cm.RdBu_r) ax.add_feature(cfeature.LAND, facecolor='w', edgecolor='k') cb = plt.colorbar(fill, orientation='horizontal') cb.set_label('correlation coefficient', fontsize=12) plt.title('EOF1 expressed as correlation', fontsize=16) # Plot the leading PC time series. plt.figure() years = range(1962, 2012)
def eof_analysis_get_variance_mode(
    mode,
    timeseries,
    eofn,
    eofn_max=None,
    debug=False,
    EofScaling=False,
    save_multiple_eofs=False,
):
    """Run an area-weighted EOF analysis and return the requested mode(s).

    Parameters
    ----------
    mode : str
        Name of the variability mode; used only for the arbitrary sign
        control (a known characteristic of EOF analysis).
    timeseries : cdms2 variable
        Time-varying 2d field, i.e. a 3d array (time, lat, lon).
    eofn : int
        Index (1-based) of the target EOF to return.
    eofn_max : int, optional
        How many EOFs to diagnose (1..N). Defaults to `eofn`, in which case
        `save_multiple_eofs` is forced off.
    debug : bool
        Emit diagnostic messages via debug_print.
    EofScaling : bool
        If True, PCs are scaled to unit variance (divided by the
        square-root of their eigenvalue); otherwise unscaled.
    save_multiple_eofs : bool
        If True, return lists covering all diagnosed modes.

    Returns
    -------
    When save_multiple_eofs is False:
        (eof_Nth, pc_Nth, frac_Nth, reverse_sign_Nth, solver) — the EOF map,
        the principal-component time series, the explained-variance fraction
        (kept as a cdms2 array for netCDF recording), the sign-flip flag,
        and the solver itself.
    When save_multiple_eofs is True:
        (eof_list, pc_list, frac_list, reverse_sign_list, solver) — the
        same quantities as lists over all diagnosed modes.
    """
    if debug:
        print("Lib-EOF: timeseries.shape:", timeseries.shape)
    debug_print("Lib-EOF: solver", debug)

    # A single-mode request cannot return multiple EOFs.
    if eofn_max is None:
        eofn_max = eofn
        save_multiple_eofs = False

    # Area-weighted EOF decomposition; pcscaling=1 here yields normalized EOFs.
    solver = Eof(timeseries, weights="area")
    debug_print("Lib-EOF: eof", debug)
    eof = solver.eofsAsCovariance(neofs=eofn_max, pcscaling=1)
    debug_print("Lib-EOF: pc", debug)

    if EofScaling:
        # Scaled to unit variance (divided by sqrt of the eigenvalue).
        pc = solver.pcs(npcs=eofn_max, pcscaling=1)
    else:
        pc = solver.pcs(npcs=eofn_max)

    # Fraction of variance explained by each mode.
    frac = solver.varianceFraction()
    debug_print("Lib-EOF: frac", debug)

    eof_list = []
    pc_list = []
    frac_list = []
    reverse_sign_list = []
    for n in range(eofn_max):
        eof_Nth = eof[n]
        pc_Nth = pc[:, n]
        frac_Nth = cdms2.createVariable(frac[n])

        # Arbitrary sign control: flip EOF and PC together so all plots
        # share a consistent sign convention.
        reverse_sign = arbitrary_checking(mode, eof_Nth)
        if reverse_sign:
            eof_Nth = MV2.multiply(eof_Nth, -1.0)
            pc_Nth = MV2.multiply(pc_Nth, -1.0)

        # Restore the time axis and attach netCDF metadata.
        pc_Nth.setAxis(0, timeseries.getTime())
        frac_Nth.units = "ratio"
        pc_Nth.comment = (
            "Non-scaled time series for principal component of "
            + str(eofn)
            + "th variance mode"
        )

        eof_list.append(eof_Nth)
        pc_list.append(pc_Nth)
        frac_list.append(frac_Nth)
        reverse_sign_list.append(reverse_sign)

    if save_multiple_eofs:
        return eof_list, pc_list, frac_list, reverse_sign_list, solver

    # Single-mode result: pick out the requested (1-based) mode only.
    idx = eofn - 1
    return eof_list[idx], pc_list[idx], frac_list[idx], reverse_sign_list[idx], solver
def NatureRevisions_Figure5(D):
    """Draw the four-panel aerosol-era (1950-1975) fingerprint figure.

    (a) fingerprint map, (b) its PC, (c) observational projections,
    (d) detection/attribution S/N histograms. `D` is expected to expose
    D.ALL (a DroughtAtlas-like object with .mma and .sn_at_time).
    Python 2 code (bare `print` statements). NOTE(review): the per-axes
    font fixes at the bottom assume the exact axes creation order above
    (fig.axes[0..4]); do not reorder the plotting calls.
    """
    aerosol_start = cdtime.comptime(1950,1,1)
    aerosol_stop = cdtime.comptime(1975,12,31)
    # Fingerprint: EOF of the multi-model average over the aerosol era.
    aerosolsolver=Eof(D.ALL.mma(time=(aerosol_start,aerosol_stop)),weights='area')
    # fac orients the arbitrary EOF sign consistently.
    fac=da.get_orientation(aerosolsolver)
    # --- Panel (a): fingerprint map ---
    plt.subplot(221)
    m=b.landplot(fac*aerosolsolver.eofs()[0],vmin=-.1,vmax=.1)
    m.fillcontinents(color="gray",zorder=0)
    # Percent of variance explained by the leading mode (used in the long title).
    varex= str(int(100*np.round(aerosolsolver.varianceFraction()[0],2)))
    plt.title("(a)")#: 1950-1975 historical fingerprint ("+varex+"% of variance explained)",fontsize=8)
    m.drawcoastlines(color='gray')
    plt.ylim(-60,90)
    plt.colorbar(orientation='horizontal',label='EOF loading')
    # --- Panel (b): associated PC ---
    plt.subplot(222)
    Plotting.time_plot(fac*aerosolsolver.pcs()[:,0],color=cm.Greys(.8),lw=1)
    plt.title("(b)")#: Associated PC",fontsize=8)
    plt.ylabel("Temporal amplitude")
    # --- Panel (c): observational projections onto the fingerprint ---
    plt.subplot(223)
    target_obs,cru_proj,dai_proj=pdsi_time_series(D,aerosol_start,aerosol_stop,aerosols=True)
    plt.legend(fontsize=6)
    plt.title("(c)")#: Projections on fingerprint",fontsize=8)
    # --- Panel (d): detection/attribution histograms ---
    plt.subplot(224)
    # target_obs = D.ALL.get_tree_ring_projection(solver = aerosolsolver)(time=(aerosol_start,aerosol_stop))
    L=len(target_obs)
    modslopes,noiseterm = D.ALL.sn_at_time(aerosol_start,L,overlapping=True,solver=aerosolsolver)
    # Normalize all trends by the noise standard deviation.
    ns=np.std(noiseterm)
    signal = float(cmip5.get_linear_trends(target_obs))
    plt.hist(modslopes/ns,20,normed=True,color=get_dataset_color("h85"),alpha=.5)
    lab = str(aerosol_start.year)+"-"+str(aerosol_stop.year)
    da.fit_normals_to_data(modslopes/ns,color=get_dataset_color("h85"),lw=1,label="H85")
    plt.hist(noiseterm/ns,20,normed=True,color=get_dataset_color("tree_noise"),alpha=.5)
    da.fit_normals_to_data(noiseterm/ns,color=get_dataset_color("tree_noise"),lw=1,label="Pre-1850 tree rings")
    percentiles=[]
    plt.axvline(signal/ns,color=get_dataset_color("tree"),lw=1,label=lab+" GDA trend")
    # Percentile of the observed trend within the noise and model distributions.
    noise_percentile=stats.percentileofscore(noiseterm.tolist(),signal)
    h85_percentile=stats.percentileofscore(modslopes.tolist(),signal)
    percentiles += [noise_percentile,h85_percentile]
    # Instrumental-dataset trends (Dai and CRU) as vertical reference lines.
    daitrend = cmip5.get_linear_trends(dai_proj)
    print "DAI slope is "+str(daitrend)
    daisignal = daitrend/ns
    plt.axvline(daisignal,color=get_dataset_color("dai"),lw=1,label="Dai")
    print "DAI signal/noise is "+str(daisignal)
    crutrend = cmip5.get_linear_trends(cru_proj)
    print "CRU slope is "+str(crutrend)
    crusignal = crutrend/ns
    plt.axvline(crusignal,color=get_dataset_color("cru"),lw=1,label="CRU")
    print "CRU signal/noise is "+str(crusignal)
    plt.legend(loc=0,fontsize=8)
    plt.xlabel("S/N")
    plt.ylabel("Normalized Frequency")
    plt.title("(d)")#: Detection and Attribution Results",fontsize=8)
    # --- Publication font sizes on every axes of the figure ---
    fig=plt.gcf()
    for ax in fig.axes:
        plt.setp(ax.xaxis.get_label(),fontsize=6)
        plt.setp(ax.yaxis.get_label(),fontsize=6)
        plt.setp(ax.get_xticklabels(),fontsize=6)
        plt.setp(ax.get_yticklabels(),fontsize=6)
    # fig.axes order: [0]=(a) map, [1]=colorbar, [2]=(b), [3]=(c), [4]=(d).
    ax=fig.axes[0]
    ax.set_title("(a)",fontsize=6)
    ax=fig.axes[2]
    ax.set_title("(b)",fontsize=6)
    ax=fig.axes[3]
    ax.set_title("(c)",fontsize=6)
    ax=fig.axes[4]
    ax.set_title("(d)",fontsize=6)
    leg=ax.legend(fontsize=6,ncol=1,loc=2)
    leg.set_frame_on(False)
    # Thin out the colorbar tick labels.
    cax=fig.axes[1]
    ticklabels=["-0.1","","-0.05","","0","","0.05","","0.1"]
    cax.set_xticklabels(ticklabels)
    plt.setp(cax.xaxis.get_ticklabels(),fontsize=6)
    plt.setp(cax.xaxis.get_label(),fontsize=6)
class DroughtAtlas():
    """Earlier revision of the DroughtAtlas detection/attribution helper.

    Loads a tree-ring PDSI atlas and the matching CMIP5 hist+rcp85 ensemble,
    builds an (unweighted) EOF fingerprint from the multi-model average, and
    projects observations and models onto it. Python 2 code (bare `print`).
    """

    def __init__(self,name,cutoff='0001-1-1'):
        self.name=name
        #if name.find("+")<0:
        # --- Observations (tree-ring PDSI reconstruction) ---
        f = cdms.open("../DROUGHT_ATLAS/PROCESSED/"+name+".nc")
        obs = f("pdsi")
        self.obs = MV.masked_where(np.isnan(obs),obs)
        # Values with |pdsi| > 90 are treated as missing-data flags.
        self.obs = MV.masked_where(np.abs(self.obs)>90,self.obs)
        self.obs = self.obs(time=(cutoff,'2020-12-31'))
        self.obs=mask_data(self.obs,self.obs.mask[0]) #Make all the obs have the same mask as the first datapoint
        f.close()
        # --- Models (CMIP5 historical + RCP8.5 PDSI) ---
        fm = cdms.open("../DROUGHT_ATLAS/CMIP5/pdsi."+name+".hist.rcp85.nc")
        self.model=get_rid_of_bad(fm("pdsi"))
        self.model=MV.masked_where(np.isnan(self.model),self.model)
        fm.close()
        # else: #DEPRECATED: MERGE observations onto common grid using old code
        #     name1,name2=name.split("+")
        #     f1 = cdms.open("../DROUGHT_ATLAS/PROCESSED/"+name1+".nc")
        #     obs1 = f1("pdsi")
        #     obs1 = MV.masked_where(np.isnan(obs1),obs1)
        #     obs1 = MV.masked_where(np.abs(obs1)>90,obs1)
        #     obs1 = obs1(time=(cutoff,'2017-12-31'))
        #     obs1=mask_data(obs1,obs1.mask[0])
        #     f1.close()
        #     fm1 = cdms.open("../DROUGHT_ATLAS/CMIP5/pdsi."+name1+".hist.rcp85.nc")
        #     model1=get_rid_of_bad(fm1("pdsi"))
        #     model1=MV.masked_where(np.isnan(model1),model1)
        #     fm1.close()
        #     f2 = cdms.open("../DROUGHT_ATLAS/PROCESSED/"+name2+".nc")
        #     obs2 = f2("pdsi")
        #     obs2 = MV.masked_where(np.isnan(obs2),obs2)
        #     obs2 = MV.masked_where(np.abs(obs2)>90,obs2)
        #     obs2 = obs2(time=(cutoff,'2017-12-12'))
        #     obs2=mask_data(obs2,obs2.mask[0])
        #     f2.close()
        #     fm2 = cdms.open("../DROUGHT_ATLAS/CMIP5/pdsi."+name2+".hist.rcp85.nc")
        #     model2=get_rid_of_bad(fm2("pdsi"))
        #     model2=MV.masked_where(np.isnan(model2),model2)
        #     fm2.close()
        #     self.obs=merge.merge(obs1,obs2)
        #     self.model=merge.merge(model1,model2)
        # --- Fingerprint: EOF of the multi-model average, on the obs mask ---
        # NOTE(review): unlike the later revision, no weights='area' here.
        mma = MV.average(self.model,axis=0)
        self.mma = mask_data(mma,self.obs[0].mask) #make all the models have the same mask
        self.solver = Eof(self.mma)
        eofmask=self.solver.eofs()[0].mask
        # fac flips the arbitrary EOF sign to a consistent orientation.
        self.fac=da.get_orientation(self.solver)
        # Obs projected onto the fingerprint (leading mode only).
        self.projection = self.solver.projectField(mask_data(self.obs,eofmask))[:,0]*self.fac
        # Pre-industrial (pre-1850) part of the projection serves as noise.
        self.noise = self.projection(time=('1-1-1','1850-1-1'))

    def plot_fingerprint(self,ax1=None,ax2=None):
        """Plot the sign-oriented leading EOF map (211) and its PC (212).

        NOTE(review): `ax2` is accepted but never used.
        """
        eof1=self.solver.eofs()[0]*self.fac
        #v=max([np.abs(np.ma.min(eof1)),np.abs(np.ma.max(eof1))])
        pc1 = self.solver.pcs()[:,0]*self.fac
        if ax1 is None:
            ax1=plt.subplot(211)
        # Regional atlases get the regional map; anything else plots globally.
        if self.name not in ["OWDA","MXDA","NADA","MADA"]:
            #m=bmap(eof1,cmap=cm.BrBG,vmin=-v,vmax=v)
            m=landplot(eof1)
            m.drawcoastlines()
            #plt.colorbar(orientation="horizontal",)
        else:
            m=plot_regional(self.solver.eofs()[0],self.name,cmap=cm.BrBG)
        plt.subplot(212)
        time_plot(pc1)

    def model_projections(self):
        """Project each ensemble member onto a per-member fingerprint.

        For every member a fresh solver is fit on the mma restricted to that
        member's mask; results are stored in self.P as a (model, time) array.
        """
        to_proj = mask_data(self.model,self.solver.eofs()[0].mask)
        P=MV.zeros(to_proj.shape[:2])
        for i in range(to_proj.shape[0]):
            tp = to_proj[i]
            mma_mask = mask_data(self.mma,tp[0].mask)
            solver = Eof(mma_mask)
            fac=da.get_orientation(solver)
            P[i] = solver.projectField(tp)[:,0]*fac
        P.setAxisList(to_proj.getAxisList()[:2])
        self.P=P

    def sn_at_time(self,start_time,L,overlapping=True):
        """Per-model trends and noise slope distribution for an L-year window.

        Returns (modslopes, noiseterm).
        """
        # Lazily compute model projections on first use.
        if not hasattr(self,"P"):
            self.model_projections()
        stop_time=start_time.add(L,cdtime.Years)
        modslopes = cmip5.get_linear_trends(self.P(time=(start_time,stop_time)))
        if overlapping:
            noiseterm = bootstrap_slopes(self.noise,L)
        else:
            # NOTE(review): /365. presumably converts per-day to per-year
            # slopes from da.get_slopes — confirm against da's units.
            noiseterm = da.get_slopes(self.noise,L)/365.
        return modslopes,noiseterm

    def obs_SN(self,start_time,stop_time=None,overlapping=True,include_dai=False):
        """Plot histograms of model and noise trends vs the observed trend.

        NOTE(review): the sn_at_time call below hard-codes overlapping=True,
        so the `overlapping` parameter is ignored; the Dai trend computed
        under include_dai is never used or plotted — confirm intent.
        """
        if stop_time is None:
            stop_time=cmip5.stop_time(self.projection)
        target_obs = self.projection(time=(start_time,stop_time))
        L=len(target_obs)
        modslopes,noiseterm = self.sn_at_time(start_time,L,overlapping=True)
        # Normalize all trends by the noise standard deviation.
        ns=np.std(noiseterm)
        signal = float(cmip5.get_linear_trends(target_obs))/ns
        plt.hist(modslopes/ns,20,normed=True,color=cm.Oranges(.8),alpha=.5)
        lab = str(start_time.year)+"-"+str(stop_time.year)
        da.fit_normals_to_data(modslopes/ns,color=cm.Oranges(.9),label=lab+" Model projections")
        plt.hist(noiseterm/ns,20,normed=True,color=cm.Greens(.8),alpha=.5)
        da.fit_normals_to_data(noiseterm/ns,color=cm.Greens(.9),label="Pre-1850 tree-ring reconstructions")
        plt.axvline(signal,color=cm.Blues(.8),lw=3,label=lab+" Tree-ring reconstructions")
        print signal
        if include_dai:
            dai_proj = self.project_dai_on_solver(start=start_time)
            daitrend = cmip5.get_linear_trends(dai_proj(time=(start_time,stop_time)))
        plt.legend(loc=0)

    def time_of_emergence(self,start_time,times = np.arange(10,76),plot=True,**kwargs):
        """S/N as a function of trend length; stores the result in self.TOE.

        For each L in `times`, computes per-model trend S/N over
        [start_time, start_time+L]; optionally plots mean and min/max band.
        """
        if not hasattr(self,"P"):
            self.model_projections()
        nmod,nyears = self.P.shape
        self.TOE=MV.zeros((nmod,len(times)))
        for i in range(len(times)):
            L=times[i]
            modslopes,noiseterm = self.sn_at_time(start_time,L)
            sns=modslopes/np.std(noiseterm)
            self.TOE[:,i]=sns
        self.TOE.setAxis(0,self.P.getAxis(0))
        if plot:
            endyears = start_time.year+times
            plt.plot(endyears,np.ma.average(self.TOE.asma(),axis=0),lw=4,label=self.name+" model mean signal",**kwargs)
            plt.fill_between(endyears,np.ma.min(self.TOE.asma(),axis=0),np.ma.max(self.TOE.asma(),axis=0),alpha=.4,**kwargs)
            # 90% normal-interval threshold as an emergence reference line.
            plt.axhline(stats.norm.interval(.9)[-1],c="r",lw=3)
            plt.xlabel("Trend end year")
            plt.ylabel("Signal-to-noise ratio")

    def project_dai_on_solver(self,start='1970-1-1'):
        """Project the Dai self-calibrated PDSI dataset onto the fingerprint.

        A grid cell must be valid at every time step to be used (newmask);
        a solver is fit on the mma restricted to those cells. Returns the
        sign-oriented leading-mode projection.
        """
        f = cdms.open("../DROUGHT_ATLAS/OBSERVATIONS/DAI_selfcalibrated.nc")
        dai_jja=f("pdsi")
        f.close()
        dai_jja_mask = mask_data(dai_jja,self.obs[0].mask)(time=(start,'2018-12-31'))
        # Cells valid at all times: product of the inverted mask over time.
        newmask = np.prod(~dai_jja_mask.mask,axis=0)
        dai_jja_mask = mask_data(dai_jja_mask,newmask==0)
        solver = Eof(mask_data(self.mma,newmask==0))
        dai_jja_mask = mask_data(dai_jja_mask,solver.eofs()[0].mask)
        fac = da.get_orientation(solver)
        return solver.projectField(dai_jja_mask)[:,0]*fac