Beispiel #1
0
def model_projections(fingerprint, h85, direction):
    """Project each h85 ensemble member onto the fingerprint for `direction`.

    Parameters
    ----------
    fingerprint : object with a ``solvers`` dict mapping direction -> EOF solver
    h85 : object with a ``reshaped`` dict of (member, time, month) arrays
    direction : "west", "east", or "multi" (joint west+east projection)

    Returns
    -------
    Masked 2-d array of projection time series, one row per ensemble member.
    Members whose projection fails are left fully masked.
    """
    solver = fingerprint.solvers[direction]
    # Orientation factor gives the fingerprint a consistent sign convention.
    fac = da.get_orientation(solver)
    if direction == "multi":
        modeldata = [h85.reshaped["west"], h85.reshaped["east"]]
        shaper = h85.reshaped["west"]
    else:
        modeldata = h85.reshaped[direction]
        shaper = modeldata
    # Initialize with the missing-value sentinel; rows that fail to project
    # keep 1e20 and get masked below.
    P = MV.zeros(shaper.shape[:-1]) + 1.e20
    for i in range(shaper.shape[0]):
        try:
            if direction != "multi":
                P[i] = fac * solver.projectField(modeldata[i])[:, 0]
            else:
                P[i] = fac * solver.projectField(
                    [h85.reshaped["west"][i], h85.reshaped["east"][i]])[:, 0]
        except Exception:
            # Best-effort by design: skip members the solver cannot project
            # (e.g. fully masked data).  Previously a bare `except:` which
            # also swallowed KeyboardInterrupt/SystemExit.
            continue

    Pm = MV.masked_where(np.abs(P) > 1.e10, P)
    Pm.setAxisList(shaper.getAxisList()[:-1])
    Pm.getAxis(1).id = "time"
    return Pm
    def concatenate_piControl(self, season=None, compressed=False):
        """Assemble piControl runs from all OK models into one masked array.

        Each model's first ensemble member is truncated to the length of the
        shortest available control run so all models share a common time axis.

        Parameters
        ----------
        season : str or None
            Name of a cdutil seasonal extractor (e.g. "DJF"). If None the
            raw (model, time) array is returned without seasonal departures.
        compressed : bool
            Passed through to DA_tools.concatenate_this.
        """
        experiment = "piControl"
        fnames = sorted(
            get_ensemble_filenames(self.variable, self.region, experiment))
        models = get_ok_models(self.region)

        nmod = len(models)
        ntimes = []
        model_names = []
        # First pass (metadata only): find the shortest control run without
        # loading any data.
        for model in models:
            I = np.where([x.split(".")[2] == model for x in fnames])[0]
            if len(I) > 0:
                first_member = int(I[0])
                fname = fnames[first_member]
                model_names += [fname]
                f = cdms.open(fname)
                ntimes += [int(f[self.variable].shape[0])]
                f.close()
        L = np.min(ntimes)
        # Use the time axis of the shortest control run for everybody.
        imin = np.argmin(ntimes)
        fshortest = model_names[imin]
        f = cdms.open(fshortest)
        tax = f(self.variable).getTime()
        tax.id = 'time'
        tax.designateTime()
        f.close()
        # Second pass: load the data, truncated to the common length L.
        SingleMember = np.ma.zeros((nmod, L)) + 1.e20
        # BUGFIX: index rows by the model's position in `models` so rows stay
        # aligned with the model axis set below; the old dense counter shifted
        # every subsequent model up one row whenever a model had no data.
        for j, model in enumerate(models):
            I = np.where([x.split(".")[2] == model for x in fnames])[0]
            if len(I) > 0:
                first_member = I[0]
                fname = fnames[first_member]
                f = cdms.open(fname)
                vdata = f(self.variable)
                SingleMember[j] = vdata[:L]
                # BUGFIX: close only the file we actually opened; previously
                # f.close() ran unconditionally, closing a stale handle (or
                # raising NameError if the first model had no data).
                f.close()
            else:
                if self.verbose:
                    print("No piControl data for " + model + " " +
                          self.variable)

        # Rows that were never filled keep the 1e20 sentinel and get masked.
        SingleMember = MV.masked_where(
            np.abs(SingleMember) > 1.e10, SingleMember)
        SingleMember = MV.array(SingleMember)
        SingleMember.setAxis(1, tax)
        SingleMember.setAxis(0, cmip5.make_model_axis(models))
        # KLUDGE: first year is zero in the downloaded data; mask it out
        # (fix belongs in the downloader).
        SingleMember = MV.masked_where(SingleMember == 0, SingleMember)
        if season is None:
            return SingleMember
        cdutil.setTimeBoundsMonthly(SingleMember)
        seasonal = getattr(cdutil, season).departures(SingleMember)

        return DA_tools.concatenate_this(seasonal, compressed=compressed)
Beispiel #3
0
    def obs_SN(self,
               start_time,
               stop_time=None,
               overlapping=True,
               include_trees=True,
               include_dai=False,
               include_cru=False,
               include_piControl=False,
               noisestart=None,
               solver=None,
               plot=True):
        """Compute observed signal-to-noise ratios against model/tree noise.

        All trends are normalized by the standard deviation of the pre-1850
        tree-ring noise distribution.  Optionally plots histograms of the H85
        model-trend and noise-trend distributions.

        Returns
        -------
        dict mapping dataset name ("trees", "dai", "cru") to
        [signal/noise, noise percentile, h85 percentile].
        """
        to_return = {}
        if stop_time is None:
            stop_time = cmip5.stop_time(self.get_tree_ring_projection())
        target_obs = self.get_tree_ring_projection(solver=solver)(
            time=(start_time, stop_time))
        L = len(target_obs)
        # BUGFIX: forward the caller's `overlapping` flag; it was previously
        # hard-coded to True, silently ignoring the keyword argument.
        modslopes, noiseterm = self.sn_at_time(start_time,
                                               L,
                                               overlapping=overlapping,
                                               noisestart=noisestart,
                                               solver=solver)
        ns = np.std(noiseterm)
        signal = float(cmip5.get_linear_trends(target_obs))
        if plot:
            # NOTE(review): `normed` was removed in matplotlib 3.x; newer
            # versions need density=True instead.
            plt.hist(modslopes / ns,
                     20,
                     normed=True,
                     color=get_dataset_color("h85"),
                     alpha=.5)
            lab = str(start_time.year) + "-" + str(stop_time.year)
            da.fit_normals_to_data(modslopes / ns,
                                   color=get_dataset_color("h85"),
                                   lw=1,
                                   label="H85")

            plt.hist(noiseterm / ns,
                     20,
                     normed=True,
                     color=get_dataset_color("tree_noise"),
                     alpha=.5)
            da.fit_normals_to_data(noiseterm / ns,
                                   color=get_dataset_color("tree_noise"),
                                   lw=1,
                                   label="Pre-1850 tree rings")

        if include_trees:
            percentiles = []
            if plot:
                plt.axvline(signal / ns,
                            color=get_dataset_color("tree"),
                            lw=1,
                            label=lab + " GDA trend")
            print(signal / ns)
            noise_percentile = stats.percentileofscore(noiseterm.tolist(),
                                                       signal)
            h85_percentile = stats.percentileofscore(modslopes.tolist(),
                                                     signal)
            percentiles += [noise_percentile, h85_percentile]
            to_return["trees"] = [signal / ns] + percentiles
        if include_dai:
            daipercentiles = []
            dai_proj = self.project_dai_on_solver(start=start_time,
                                                  solver=solver)
            daitrend = float(
                cmip5.get_linear_trends(dai_proj(time=(start_time,
                                                       stop_time))))
            daisignal = daitrend / ns
            noise_percentile = stats.percentileofscore(noiseterm.tolist(),
                                                       daitrend)
            h85_percentile = stats.percentileofscore(modslopes.tolist(),
                                                     daitrend)
            daipercentiles += [noise_percentile, h85_percentile]
            if plot:
                plt.axvline(daisignal,
                            color=get_dataset_color("dai"),
                            lw=1,
                            label="Dai")
            print("DAI signal/noise is " + str(daisignal))
            to_return["dai"] = [daitrend / ns] + daipercentiles

        if include_cru:
            crupercentiles = []
            cru_proj = self.project_cru_on_solver(start=start_time,
                                                  solver=solver)
            crutrend = float(
                cmip5.get_linear_trends(cru_proj(time=(start_time,
                                                       stop_time))))
            noise_percentile = stats.percentileofscore(noiseterm.tolist(),
                                                       crutrend)
            h85_percentile = stats.percentileofscore(modslopes.tolist(),
                                                     crutrend)
            crupercentiles += [noise_percentile, h85_percentile]
            crusignal = crutrend / ns
            if plot:
                plt.axvline(crusignal,
                            color=get_dataset_color("cru"),
                            lw=1,
                            label="CRU")
            print("CRU signal/noise is " + str(crusignal))
            to_return["cru"] = [crutrend / ns] + crupercentiles
        if include_piControl:
            # Compare against model-internal (piControl) noise as well; this
            # only prints/plots and is not added to the returned dict.
            p = self.project_piControl_on_solver(solver=solver)
            noiseterm_mod = bootstrap_slopes(p, L)
            if plot:
                plt.hist(noiseterm_mod / ns,
                         20,
                         normed=True,
                         color=get_dataset_color("picontrol"),
                         alpha=.5)
                da.fit_normals_to_data(noiseterm_mod / ns,
                                       color=get_dataset_color("picontrol"),
                                       lw=1,
                                       label="PiControl")
            print("relative to model noise:")
            print(float(signal) / np.std(noiseterm_mod))

        if plot:
            plt.legend(loc=0)
            plt.xlabel("S/N")
            plt.ylabel("Normalized Frequency")
        return to_return
Beispiel #4
0
    def __init__(self, name, cutoff='0001-1-1'):
        """Load drought-atlas observations and CMIP5 models for atlas `name`.

        Reads PDSI observations from ../DROUGHT_ATLAS/PROCESSED/<name>.nc and
        the matching hist+rcp85 ensemble, builds an area-weighted EOF
        fingerprint from the multi-model mean, and projects the observations
        onto it.

        Parameters
        ----------
        name : str
            Atlas name; a "2.5" resolution tag, if present, is stripped from
            the stored ``self.name`` (the raw name is still used for paths).
        cutoff : str
            Earliest time to retain in the observations.
        """
        if name.find("2.5") >= 0:
            self.name = name.split("2.5")[0]
        else:
            self.name = name
        #if name.find("+")<0:
        f = cdms.open("../DROUGHT_ATLAS/PROCESSED/" + name + ".nc")
        obs = f("pdsi")
        # Mask NaNs and physically implausible PDSI magnitudes (>90),
        # then restrict to the requested time window.
        self.obs = MV.masked_where(np.isnan(obs), obs)
        self.obs = MV.masked_where(np.abs(self.obs) > 90, self.obs)
        self.obs = self.obs(time=(cutoff, '2020-12-31'))

        self.obs = mask_data(
            self.obs, self.obs.mask[0]
        )  #Make all the obs have the same mask as the first datapoint
        f.close()
        fm = cdms.open("../DROUGHT_ATLAS/CMIP5/pdsi." + name +
                       ".hist.rcp85.nc")
        self.model = get_rid_of_bad(fm("pdsi"))
        self.model = MV.masked_where(np.isnan(self.model), self.model)
        fm.close()

        # else:
        #DEPRECATED: MERGE observations onto common grid using old code
        #     name1,name2=name.split("+")
        #     f1 = cdms.open("../DROUGHT_ATLAS/PROCESSED/"+name1+".nc")
        #     obs1 = f1("pdsi")
        #     obs1 = MV.masked_where(np.isnan(obs1),obs1)
        #     obs1 = MV.masked_where(np.abs(obs1)>90,obs1)
        #     obs1 = obs1(time=(cutoff,'2017-12-31'))

        #     obs1=mask_data(obs1,obs1.mask[0])
        #     f1.close()
        #     fm1 = cdms.open("../DROUGHT_ATLAS/CMIP5/pdsi."+name1+".hist.rcp85.nc")
        #     model1=get_rid_of_bad(fm1("pdsi"))
        #     model1=MV.masked_where(np.isnan(model1),model1)
        #     fm1.close()

        #     f2 = cdms.open("../DROUGHT_ATLAS/PROCESSED/"+name2+".nc")
        #     obs2 = f2("pdsi")
        #     obs2 = MV.masked_where(np.isnan(obs2),obs2)
        #     obs2 = MV.masked_where(np.abs(obs2)>90,obs2)
        #     obs2 = obs2(time=(cutoff,'2017-12-12'))

        #     obs2=mask_data(obs2,obs2.mask[0])
        #     f2.close()
        #     fm2 = cdms.open("../DROUGHT_ATLAS/CMIP5/pdsi."+name2+".hist.rcp85.nc")
        #     model2=get_rid_of_bad(fm2("pdsi"))
        #     model2=MV.masked_where(np.isnan(model2),model2)
        #     fm2.close()

        #     self.obs=merge.merge(obs1,obs2)
        #     self.model=merge.merge(model1,model2)

        # Multi-model average, masked to match the first observation field.
        mma = MV.average(self.model, axis=0)
        self.mma = mask_data(
            mma, self.obs[0].mask)  #make all the models have the same mask
        # Area-weighted EOF fingerprint of the multi-model mean.
        self.solver = Eof(self.mma, weights='area')
        self.eofmask = self.solver.eofs()[0].mask

        # Orientation factor gives the fingerprint a consistent sign.
        self.fac = da.get_orientation(self.solver)
        self.projection = self.solver.projectField(
            mask_data(self.obs, self.eofmask))[:, 0] * self.fac
        # The pre-1850 part of the projection serves as the noise estimate.
        self.noise = self.projection(time=('1-1-1', '1850-1-1'))
        self.P = self.model_projections()
Beispiel #5
0
    def __init__(self, experiment):
        """Load Sahel precipitation for `experiment` and build EOF solvers.

        Constructs annual-cycle fingerprints for the west ("pr_W") and
        central-east ("pr_CE") regions plus a joint multivariate solver,
        storing raw data, reshaped (year, month) arrays, anomalies, and
        solvers on the instance.

        Parameters
        ----------
        experiment : str
            CMIP5 experiment name; "piControl" gets special handling
            (control runs are concatenated into one long record).
        """
        self.experiment = experiment
        f = cdms.open("DATA/cmip5.sahel_precip." + experiment + ".nc")
        west = f("pr_W")
        east = f("pr_CE")
        # BUGFIX: close the file once the variables are loaded (the handle
        # previously leaked for the lifetime of the object).
        f.close()

        self.data = {}
        self.data["west"] = west
        self.data["east"] = east

        # Remove the mean annual cycle.
        west_a = cdutil.ANNUALCYCLE.departures(west)
        east_a = cdutil.ANNUALCYCLE.departures(east)

        if experiment != "piControl":
            nmod, nmon = west_a.shape
            # Reshape to (model, year, month).  Floor division is identical
            # to `/` on ints in Python 2 but also works under Python 3.
            west_rs = west_a.reshape((nmod, nmon // 12, 12))
            west_rs.setAxis(0, west.getAxis(0))
            tax = cdms.createAxis(west.getTime()[6::12])
            tax.id = 'time'
            tax.designateTime()
            tax.units = west.getTime().units
            west_rs.setAxis(1, tax)

            # Fingerprint from the multi-model-mean seasonal evolution.
            westsolver = Eof(MV.average(west_rs, axis=0))
        else:
            # piControl: concatenate all control runs into one long record
            # and synthesize monthly/daily time axes for it.
            west_a = da.concatenate_this(west_a, compressed=True)
            nmon, = west_a.shape
            tax = cdms.createAxis(np.arange(west_a.shape[0]))
            tax.units = 'months since 0001-1-15'
            tax.id = 'time'
            tax.designateTime()
            taxC = tax.asComponentTime()
            test = [x.torel('days since 0001-1-15').value for x in taxC]
            tax_days = cdms.createAxis(test)
            tax_days.designateTime()
            tax_days.id = 'time'
            tax_days.units = 'days since 0001-1-15'
            west_a.setAxis(0, tax_days)
            taxmonthly = cdms.createAxis(west_a.getTime()[6::12])
            taxmonthly.units = west_a.getTime().units
            taxmonthly.designateTime()
            taxmonthly.id = 'time'
            west_rs = west_a.reshape((nmon // 12, 12))
            west_rs.setAxis(0, taxmonthly)
            westsolver = Eof(west_rs)

        if experiment != "piControl":
            nmod, nmon = east_a.shape
            east_rs = east_a.reshape((nmod, nmon // 12, 12))
            east_rs.setAxis(0, east.getAxis(0))
            east_rs.setAxis(1, tax)

            eastsolver = Eof(MV.average(east_rs, axis=0))
        else:
            east_a = da.concatenate_this(east_a, compressed=True)
            east_a.setAxis(0, tax_days)
            nmon, = east_a.shape
            east_rs = east_a.reshape((nmon // 12, 12))
            east_rs.setAxis(0, taxmonthly)
            eastsolver = Eof(east_rs)

        # NOTE(review): these orientation factors are computed but never
        # used below -- confirm whether they were meant to flip the solvers.
        facwest = da.get_orientation(westsolver)
        faceast = da.get_orientation(eastsolver)

        self.solvers = {}
        self.solvers["east"] = eastsolver
        self.solvers["west"] = westsolver

        self.reshaped = {}
        self.reshaped["east"] = east_rs
        self.reshaped["west"] = west_rs

        # Joint west+east fingerprint; for multi-member ensembles, average
        # within each model first.
        if len(self.reshaped["west"].shape) > 2:

            data = [
                MV.average(cmip5.ensemble2multimodel(self.reshaped["west"]),
                           axis=0),
                MV.average(cmip5.ensemble2multimodel(self.reshaped["east"]),
                           axis=0)
            ]
        else:
            data = [self.reshaped["west"], self.reshaped["east"]]
        msolver = MultivariateEof(data)
        self.solvers["multi"] = msolver
        self.anomalies = {}
        self.anomalies["east"] = east_a
        self.anomalies["west"] = west_a
Beispiel #6
0
def NatureRevisions_Figure1(D):
    """Figure 1: fingerprint maps (left column) and projection time series
    (right column) for the five drought atlases, plus a shared colorbar.

    Returns the colorbar axes so callers can tweak it further.
    """
    i = 1
    letters = ["a", "b", "c", "d", "e"]
    letters2 = ["f", "g", "h", "i", "j"]
    for X in ["ANZDA", "MADA", "MXDA", "NADA", "OWDA"]:
        solver = getattr(D, X).solver
        fac = da.get_orientation(solver)
        calib_period = ('1920-1-1', '2005-12-31')
        eof1 = fac * solver.eofs()[0]

        # Left column: leading EOF (fingerprint) map for this atlas.
        plt.subplot(5, 2, 2 * i - 1)
        m = b.plot_regional(eof1, X, vmin=-.15, vmax=.15)
        m.drawcoastlines(color='gray')
        plt.title("(" + letters[i - 1] + ")", fontsize=6)
        if letters[i - 1] == "a":
            print("AAAA")  # NOTE(review): debug leftover; kept for output parity
        # Right column: projections and PC1, shown as anomalies relative to
        # the 1920-2005 calibration period.
        plt.subplot(5, 2, 2 * i)
        cru = getattr(D, X).project_cru_on_solver(start='1901-1-1')
        Plotting.time_plot(cru - np.ma.average(cru(time=calib_period)),
                           lw=1, color=get_dataset_color("CRU"), label="CRU")
        dai = getattr(D, X).project_dai_on_solver(start='1901-1-1')
        Plotting.time_plot(dai - np.ma.average(dai(time=calib_period)),
                           lw=1, color=get_dataset_color("DAI"), label="DAI")
        trees = getattr(D, X).projection(time=('1900-1-1', '1975-1-1'))
        Plotting.time_plot(trees - np.ma.average(trees(time=calib_period)),
                           lw=1, color=get_dataset_color("tree"), label=X)
        pc1 = fac * solver.pcs()[:, 0](time=('1900-1-1', '2050-12-31'))
        Plotting.time_plot(pc1 - np.ma.average(pc1(time=calib_period)),
                           lw=1, color='k', label="PC1")
        plt.ylim(-.3, .3)
        plt.setp(plt.gca().get_yticklabels(), fontsize=6)
        plt.ylabel("Temporal Amplitude", fontsize=6)
        if i != 5:
            # Only the bottom row keeps its x axis and legend.
            plt.xticks([])
        else:
            plt.legend(loc=0, ncol=2, fontsize=6)
            plt.setp(plt.gca().get_xticklabels(), fontsize=6)
            plt.xlabel("Year", fontsize=6)
        plt.title("(" + letters2[i - 1] + ")", fontsize=6)
        i += 1

    # Colorbar kludge: redraw the first map into its own axes so a
    # horizontal colorbar can be attached without disturbing the grid.
    fig = plt.gcf()
    axes = plt.gcf().axes
    ax = axes[0]
    left = 0.03
    height = 0.01
    bottom = .05
    width = .22
    cb_ax = fig.add_axes([left, bottom, width, height])
    ax.clear()
    eof1 = D.ANZDA.solver.eofs()[0] * da.get_orientation(D.ANZDA.solver)
    m = b.plot_regional(eof1, "ANZDA", vmin=-.15, vmax=.15,
                        ax=ax, cax=cb_ax, orientation='horizontal')
    m.drawcoastlines(color='gray', ax=ax)
    cb_ax.set_xticklabels(["-0.15", "", "", "0", "", "", "0.15"])
    plt.setp(cb_ax.get_xticklabels(), fontsize=6)
    plt.setp(cb_ax.xaxis.get_label(), fontsize=6)
    # The title disappears after ax.clear(); reset it.
    fig.axes[0].set_title("(a)", fontsize=6)

    return cb_ax
Beispiel #7
0
def NatureRevisions_Figure5(D):
    """Figure 5: aerosol-era (1950-1975) fingerprint analysis.

    Panels: (a) fingerprint EOF map, (b) associated PC, (c) projections of
    obs onto the fingerprint, (d) detection/attribution S/N histogram.
    """
    aerosol_start = cdtime.comptime(1950, 1, 1)
    aerosol_stop = cdtime.comptime(1975, 12, 31)
    aerosolsolver = Eof(D.ALL.mma(time=(aerosol_start, aerosol_stop)),
                        weights='area')
    fac = da.get_orientation(aerosolsolver)
    plt.subplot(221)
    m = b.landplot(fac * aerosolsolver.eofs()[0], vmin=-.1, vmax=.1)
    m.fillcontinents(color="gray", zorder=0)

    # Variance explained by the leading EOF, as an integer percentage.
    # NOTE(review): computed but unused (was once part of the panel title).
    varex = str(int(100 * np.round(aerosolsolver.varianceFraction()[0], 2)))
    plt.title("(a)")
    m.drawcoastlines(color='gray')
    plt.ylim(-60, 90)
    plt.colorbar(orientation='horizontal', label='EOF loading')
    plt.subplot(222)
    Plotting.time_plot(fac * aerosolsolver.pcs()[:, 0], color=cm.Greys(.8), lw=1)
    plt.title("(b)")
    plt.ylabel("Temporal amplitude")

    plt.subplot(223)
    target_obs, cru_proj, dai_proj = pdsi_time_series(
        D, aerosol_start, aerosol_stop, aerosols=True)
    plt.legend(fontsize=6)
    plt.title("(c)")
    plt.subplot(224)

    L = len(target_obs)
    modslopes, noiseterm = D.ALL.sn_at_time(aerosol_start, L,
                                            overlapping=True,
                                            solver=aerosolsolver)
    ns = np.std(noiseterm)
    signal = float(cmip5.get_linear_trends(target_obs))
    # NOTE(review): `normed` was removed in matplotlib 3.x (use density=True).
    plt.hist(modslopes / ns, 20, normed=True,
             color=get_dataset_color("h85"), alpha=.5)
    lab = str(aerosol_start.year) + "-" + str(aerosol_stop.year)
    da.fit_normals_to_data(modslopes / ns, color=get_dataset_color("h85"),
                           lw=1, label="H85")

    plt.hist(noiseterm / ns, 20, normed=True,
             color=get_dataset_color("tree_noise"), alpha=.5)
    da.fit_normals_to_data(noiseterm / ns,
                           color=get_dataset_color("tree_noise"),
                           lw=1, label="Pre-1850 tree rings")
    percentiles = []
    plt.axvline(signal / ns, color=get_dataset_color("tree"), lw=1,
                label=lab + " GDA trend")

    noise_percentile = stats.percentileofscore(noiseterm.tolist(), signal)
    h85_percentile = stats.percentileofscore(modslopes.tolist(), signal)
    percentiles += [noise_percentile, h85_percentile]

    daitrend = cmip5.get_linear_trends(dai_proj)
    print("DAI slope is " + str(daitrend))
    daisignal = daitrend / ns

    plt.axvline(daisignal, color=get_dataset_color("dai"), lw=1, label="Dai")
    print("DAI signal/noise is " + str(daisignal))

    crutrend = cmip5.get_linear_trends(cru_proj)
    print("CRU slope is " + str(crutrend))
    crusignal = crutrend / ns

    plt.axvline(crusignal, color=get_dataset_color("cru"), lw=1, label="CRU")
    print("CRU signal/noise is " + str(crusignal))

    plt.legend(loc=0, fontsize=8)
    plt.xlabel("S/N")
    plt.ylabel("Normalized Frequency")
    plt.title("(d)")
    # Uniform 6pt fonts everywhere, then re-letter the panels; the colorbar
    # occupies fig.axes[1], which shifts the subplot indices by one.
    fig = plt.gcf()
    for ax in fig.axes:
        plt.setp(ax.xaxis.get_label(), fontsize=6)
        plt.setp(ax.yaxis.get_label(), fontsize=6)
        plt.setp(ax.get_xticklabels(), fontsize=6)
        plt.setp(ax.get_yticklabels(), fontsize=6)
    ax = fig.axes[0]
    ax.set_title("(a)", fontsize=6)
    ax = fig.axes[2]
    ax.set_title("(b)", fontsize=6)
    ax = fig.axes[3]
    ax.set_title("(c)", fontsize=6)
    ax = fig.axes[4]
    ax.set_title("(d)", fontsize=6)
    leg = ax.legend(fontsize=6, ncol=1, loc=2)
    leg.set_frame_on(False)
    cax = fig.axes[1]
    ticklabels = ["-0.1", "", "-0.05", "", "0", "", "0.05", "", "0.1"]
    cax.set_xticklabels(ticklabels)
    plt.setp(cax.xaxis.get_ticklabels(), fontsize=6)
    plt.setp(cax.xaxis.get_label(), fontsize=6)
Beispiel #8
0
def average_histogram(obslist,
                      h85,
                      piC,
                      direction,
                      start=None,
                      stop=None,
                      months="JJ"):
    """Plot H85 and piControl trend histograms against observed trend(s).

    Parameters
    ----------
    obslist : obs object or list of obs objects
        The first (or only) one sets the default time range; additional
        ones are overlaid as vertical lines.
    h85, piC : objects with a ``reshaped`` dict of (model, year, month) data
    direction : key into the ``reshaped`` dicts (e.g. "east" or "west")
    start, stop : cdtime comptimes, defaulting to the obs time range
    months : one of "JJ", "SO", "JJA", "Jun"

    Raises
    ------
    ValueError if `months` is not a recognized specifier.
    """
    # BUGFIX: compare strings with ==, not `is` (identity), which only
    # works by accident of CPython small-string interning.
    if months == "JJ":
        mmean = lambda x: MV.average(x[:, 5:7], axis=1)
        bigmmean = lambda X: MV.average(X[:, :, 5:7], axis=2)
    elif months == "SO":
        mmean = lambda x: MV.average(x[:, 8:10], axis=1)
        bigmmean = lambda X: MV.average(X[:, :, 8:10], axis=2)
    elif months == "JJA":
        mmean = lambda x: MV.average(x[:, 5:8], axis=1)
        bigmmean = lambda X: MV.average(X[:, :, 5:8], axis=2)
    elif months == "Jun":
        mmean = lambda x: x[:, 5]
        # NOTE(review): no axis= argument, so this averages over all
        # remaining axes -- possibly meant to be just X[:, :, 5]; confirm.
        bigmmean = lambda X: MV.average(X[:, :, 5])
    else:
        # Previously an unrecognized value fell through and crashed later
        # with a NameError on `mmean`; fail fast with a clear message.
        raise ValueError("Unsupported months specifier: " + repr(months))
    if isinstance(obslist, list):
        obs = obslist[0]
    else:
        obs = obslist
    if start is None:
        start = cmip5.start_time(obs.reshaped["east"])
        start = cdtime.comptime(start.year, start.month, 1)
    if stop is None:
        stop = cmip5.stop_time(obs.reshaped["west"])
        stop = cdtime.comptime(stop.year, stop.month, 30)
    # Trend in the (first) observational dataset.
    obs_avg = mmean(obs.reshaped[direction](time=(start, stop)))
    obs_trend = cmip5.get_linear_trends(obs_avg)
    # H85 trends over the same time period (rows with masked data dropped).
    H85m = bigmmean(h85.reshaped[direction])(time=(start, stop))
    H85 = cmip5.cdms_clone(np.ma.mask_rows(H85m), H85m)
    H85_trends = cmip5.get_linear_trends(H85)
    # piControl noise: distribution of trends over windows of length L.
    noise = mmean(piC.reshaped[direction])
    L = len(obs_avg)
    noise_trends = da.get_slopes(noise, L)

    # Plot. NOTE(review): `normed` was removed in matplotlib 3.x.
    plt.hist(H85_trends.compressed(),
             25,
             color=da_colors("h85"),
             alpha=.5,
             normed=True)
    plt.hist(noise_trends, 25, color=da_colors("piC"), alpha=.5, normed=True)
    da.fit_normals_to_data(H85_trends,
                           color=da_colors("h85"),
                           lw=3,
                           label="H85")
    da.fit_normals_to_data(noise_trends,
                           color=da_colors("piC"),
                           lw=3,
                           label="piControl")
    plt.axvline(obs_trend, label=obs.dataset, color=da_colors(obs.dataset))
    if isinstance(obslist, list):
        # Overlay trends from any additional observational datasets.
        for obs in obslist[1:]:
            obs_avg = mmean(obs.reshaped[direction](time=(start, stop)))
            obs_trend = cmip5.get_linear_trends(obs_avg)
            plt.axvline(obs_trend,
                        label=obs.dataset,
                        color=da_colors(obs.dataset))
    plt.xlabel("S/N")
    plt.ylabel("Frequency")
    plt.legend(loc=0)
Beispiel #9
0
def obs_projections(fingerprint, X, direction):
    """Project the observational dataset X onto the fingerprint for `direction`.

    Returns the leading-mode projection time series, sign-corrected by the
    solver's orientation factor.
    """
    eof_solver = fingerprint.solvers[direction]
    orientation = da.get_orientation(eof_solver)
    projected = eof_solver.projectField(X.reshaped[direction])
    return orientation * projected[:, 0]
Beispiel #10
0
    def average_histogram(self,
                          direction,
                          start=None,
                          stop=None,
                          months="JJ",
                          datasets=None):
        """Plot H85/piControl trend histograms against observed trends.

        Parameters
        ----------
        direction : key into the ``reshaped`` dicts (e.g. "east" or "west")
        start, stop : cdtime comptimes; default to the GPCP "east" range
        months : one of "JJ", "SO", "JJA", "JAS", "Jun", "YEAR"
        datasets : str, list of str, or None (defaults to gpcp/cmap/precl)

        Raises
        ------
        ValueError if `months` is not a recognized specifier.
        """
        # BUGFIX: compare strings with ==, not `is` (identity), which only
        # works by accident of CPython small-string interning.
        if months == "JJ":
            mmean = lambda x: MV.average(x[:, 5:7], axis=1)
            bigmmean = lambda X: MV.average(X[:, :, 5:7], axis=2)
        elif months == "SO":
            mmean = lambda x: MV.average(x[:, 8:10], axis=1)
            bigmmean = lambda X: MV.average(X[:, :, 8:10], axis=2)
        elif months == "JJA":
            mmean = lambda x: MV.average(x[:, 5:8], axis=1)
            bigmmean = lambda X: MV.average(X[:, :, 5:8], axis=2)
        elif months == "JAS":
            mmean = lambda x: MV.average(x[:, 6:9], axis=1)
            bigmmean = lambda X: MV.average(X[:, :, 6:9], axis=2)
        elif months == "Jun":
            mmean = lambda x: x[:, 5]
            # NOTE(review): no axis= argument, so this averages over all
            # remaining axes -- possibly meant to be X[:, :, 5]; confirm.
            bigmmean = lambda X: MV.average(X[:, :, 5])
        elif months == "YEAR":
            mmean = lambda x: MV.average(x, axis=1)
            bigmmean = lambda X: MV.average(X, axis=2)
        else:
            # Previously an unrecognized value fell through and crashed
            # later with a NameError on `mmean`; fail fast instead.
            raise ValueError("Unsupported months specifier: " + repr(months))
        if start is None:
            start = cmip5.start_time(self.gpcp.reshaped["east"])
            start = cdtime.comptime(start.year, start.month, 1)
        if stop is None:
            stop = cmip5.stop_time(self.gpcp.reshaped["east"])
            stop = cdtime.comptime(stop.year, stop.month, 30)

        # H85 trends over the requested period (masked rows dropped).
        H85m = bigmmean(self.h85.reshaped[direction])(time=(start, stop))
        H85 = cmip5.cdms_clone(np.ma.mask_rows(H85m), H85m)
        H85_trends = cmip5.get_linear_trends(H85)
        # piControl noise: distribution of trends over windows of length L.
        noise = mmean(self.piC.reshaped[direction])
        L = stop.year - start.year + 1
        noise_trends = da.get_slopes(noise, L)

        # Plot. NOTE(review): `normed` was removed in matplotlib 3.x.
        plt.hist(H85_trends.compressed(),
                 25,
                 color=da_colors("h85"),
                 alpha=.5,
                 normed=True)
        plt.hist(noise_trends,
                 25,
                 color=da_colors("piC"),
                 alpha=.5,
                 normed=True)
        da.fit_normals_to_data(H85_trends,
                               color=da_colors("h85"),
                               lw=3,
                               label="H85")
        da.fit_normals_to_data(noise_trends,
                               color=da_colors("piC"),
                               lw=3,
                               label="piControl")

        # Observed trends, one vertical line per dataset.
        if datasets is None:
            datasets = ["gpcp", "cmap", "precl"]
        if not isinstance(datasets, list):
            datasets = [datasets]
        for dataset in datasets:
            # str.upper() replaces the deprecated string.upper() function
            # (removed in Python 3); behavior is identical for str inputs.
            X = self.OBS[dataset.upper()]
            obs_avg = mmean(X.reshaped[direction](time=(start, stop)))
            obs_trend = cmip5.get_linear_trends(obs_avg)
            plt.axvline(obs_trend, label=dataset, color=da_colors(dataset))
        plt.xlabel("S/N")
        plt.ylabel("Frequency")
        plt.legend(loc=0)