Example #1
def plot_sic_sst():
    rtype = "rcp45"
    cmip5_sst_anoms, cmip5_sic_anoms, cmip5_mv = load_CMIP5_anom_data(rtype)
    hadisst_sst_anoms, hadisst_sic_anoms, hadisst_mv = load_HadISST_anom_data(400)
    hadisst_sst_ref, hadisst_sic_ref = load_HadISST_ref_data(400)
    
    a = 50
    syn_SSTs_fname = get_syn_sst_filename(rtype, 1986, 2005, 6, 2050, a, 2, True)
    syn_SSTs = load_data(syn_SSTs_fname, "sst")
    syn_sst_mv = numpy.min(syn_SSTs)
    
    syn_SIC_fname = syn_SSTs_fname.replace("ssts", "sic")
    syn_SIC_fname = syn_SIC_fname.replace("sst", "sic")
    syn_SIC_all = load_data(syn_SIC_fname, "sic")
    syn_sic_mv = numpy.min(syn_SIC_all)
    
    m = 0 # month
    cmip5_sub_sic_anoms = cmip5_sic_anoms[m::12]
    hadisst_sub_sic_anoms = hadisst_sic_anoms[m::12]
    syn_sub_sic = syn_SIC_all[m::12]
    lats = numpy.array([90-x for x in range(0,180)])

    hadisst_X = numpy.arange(1850,2011)
    cmip5_X = numpy.arange(2006,2101)
    syn_X = numpy.arange(1899,2100)
    
    # reconstruct from the anomalies
    cmip5_sic = cmip5_sub_sic_anoms + hadisst_sic_ref[m]
    hadisst_sic = hadisst_sub_sic_anoms + hadisst_sic_ref[m]
    cmip5_sic[cmip5_sic < cmip5_mv] = cmip5_mv
    hadisst_sic[hadisst_sic < hadisst_mv] = hadisst_mv

    # calculate sea-ice extent    
    cmip5_sic_arc_extent_anom, cmip5_sic_ant_extent_anom = calc_sea_ice_extent(cmip5_sic, lats, 1.0e-9, cmip5_mv)
    hadisst_sic_arc_extent_anom, hadisst_sic_ant_extent_anom = calc_sea_ice_extent(hadisst_sic, lats, 1.0e-9, hadisst_mv)
    syn_sic_arc_extent, syn_sic_ant_extent = calc_sea_ice_extent(syn_sub_sic, lats, 1.0e-9, syn_sic_mv)

    # plot sea-ice extent
    sp0 = plt.subplot(211)
    sp0.plot(hadisst_X, hadisst_sic_arc_extent_anom, 'k-', lw=2.0)
    sp0.plot(cmip5_X, cmip5_sic_arc_extent_anom, 'r-', lw=1.5)
    sp0.plot(syn_X, syn_sic_arc_extent, "b-", lw=2.0)
    
    hadisst_sst = hadisst_sst_anoms[m::12] + hadisst_sst_ref[m]
    cmip5_sst = cmip5_sst_anoms[m::12] + hadisst_sst_ref[m]
    hadisst_sst[hadisst_sst < hadisst_mv] = hadisst_mv
    cmip5_sst[cmip5_sst < cmip5_mv] = cmip5_mv
    # calc NH GMSST and plot
    cmip5_sst_NH_anoms = calc_GMSST(cmip5_sst[:,:90,:], lats[:90], cmip5_mv)
    hadisst_sst_NH_anoms = calc_GMSST(hadisst_sst[:,:90,:], lats[:90], hadisst_mv)
    syn_sst_anoms = calc_GMSST(syn_SSTs[m::12,:90,:], lats[:90], syn_sst_mv)
    
    sp1 = plt.subplot(212)
    syn_X = numpy.arange(1899,2101)
    sp1.plot(hadisst_X, hadisst_sst_NH_anoms, 'k-', lw=2.0)
    sp1.plot(cmip5_X, cmip5_sst_NH_anoms, 'r-', lw=1.5)
    sp1.plot(syn_X, syn_sst_anoms, 'b-', lw=2.0)
    
    plt.show()
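
# calc_sea_ice_extent is used above but not shown in these examples. The sketch
# below is a hypothetical illustration of what such a helper might do - summing
# the area of grid cells whose concentration exceeds a threshold, split into
# Arctic and Antarctic by latitude. The 0.15 threshold, the Earth radius and the
# regular 1-degree grid are assumptions of the sketch, not taken from the source.
import numpy

def calc_sea_ice_extent_sketch(sic, lats, d_lon, mv, threshold=0.15, scale=1.0e-9):
    # sic: [time, lat, lon] concentrations, lats: latitude of each row (degrees N)
    R = 6371e3
    d_lat = numpy.abs(lats[0] - lats[1])
    # area of each grid cell on the sphere: R^2 * dlat * dlon * cos(lat)
    cell_area = (R**2 * numpy.deg2rad(d_lat) * numpy.deg2rad(d_lon) *
                 numpy.cos(numpy.deg2rad(lats)))[numpy.newaxis, :, numpy.newaxis]
    ice = (sic != mv) & (sic > threshold)
    nh = lats >= 0.0
    arctic = numpy.sum(numpy.where(ice[:, nh, :], cell_area[:, nh, :], 0.0), axis=(1, 2)) * scale
    antarctic = numpy.sum(numpy.where(ice[:, ~nh, :], cell_area[:, ~nh, :], 0.0), axis=(1, 2)) * scale
    return arctic, antarctic
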
def create_syn_SST_PCs(run_type, ref_start, ref_end, eof_year, neofs, nsamps, model_mean=False, monthly=False):
    # load the PCs, EOFs for this year
    pcs_fname = get_cmip5_PC_filename(run_type, ref_start, ref_end, eof_year, model_mean, monthly)
    pcs = load_data(pcs_fname, "sst")
    eof_fname = get_cmip5_EOF_filename(run_type, ref_start, ref_end, eof_year, model_mean, monthly)
    eofs = load_data(eof_fname, "sst")
    
    # load the smoothed ensemble mean
    ens_mean_fname = get_concat_anom_sst_ens_mean_smooth_fname(run_type, ref_start, ref_end, monthly)
    ens_mean = load_sst_data(ens_mean_fname, "sst")
    # we only need one ensemble mean - calculate decadal mean
    histo_sy, histo_ey, rcp_sy, rcp_ey = get_start_end_periods()
    ens_mean = ens_mean[eof_year-histo_sy]

    # transform pc data to R compatible format
    pcs = pcs.byteswap().newbyteorder()
    # create the return storage
    select_PCs = numpy.zeros([pcs.shape[0], nsamps, neofs], 'f')
    # percentile ranges
    ptiles = [0.10, 0.25, 0.50, 0.75, 0.90]
    select_PCs = numpy.random.random(select_PCs.shape)

    # now loop through each month's pcs - if yearly mean then there will only be one
    for m in range(0, pcs.shape[0]):
        # fit a copula to the principal components
        pc_mvdc = fit_mvdc(pcs[m], neofs)

        # generate a large sample of GMSSTs and their corresponding PCs
        sst_means_and_PCs = generate_large_sample_of_SSTs(pc_mvdc, eofs[m], ens_mean, neofs)

        # now sample the distribution to get nsamps number of PCs which
        # represent the distribution of GMSSTs
        select_PCs[m] = sample_SSTs(sst_means_and_PCs, neofs, nsamps, ptiles)
    
    # sort the pcs based on the first pc for each of the percentiles
    sorted_select_PCs = numpy.zeros([pcs.shape[0], nsamps, neofs], 'f')
    pts_per_pc = int(nsamps/len(ptiles))
    for m in range(0, pcs.shape[0]):
        for p in range(0, len(ptiles)):
            s = p*pts_per_pc
            e = (p+1)*pts_per_pc
            # get the first pc for this ptile
            pc0 = select_PCs[m,s:e,0]
            # sort it and get the indices
            pc0_sort = numpy.argsort(pc0)
            # sort all the pcs so that the corresponding pc0 is ascending
            for f in range(0, neofs):
                pc1 = select_PCs[m,s:e,f]
                sorted_select_PCs[m,s:e,f] = pc1[pc0_sort]
    
    # save
    out_fname = get_syn_SST_PCs_filename(run_type, ref_start, ref_end, eof_year, monthly)
    out_fname = out_fname[:-3] + "_new.nc"
    # fix the missing value meta data
    out_attrs = {"missing_value" : 2e20}
    # save the selected PCs
    save_pcs(out_fname, sorted_select_PCs, out_attrs)
    print out_fname
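
# The final loop in create_syn_SST_PCs reorders the PCs within each percentile
# block so that the first PC is ascending while keeping each sample's PCs
# together. A small self-contained illustration of that argsort pattern on
# dummy data (shapes and values invented here):
import numpy

nsamps, neofs, nptiles = 20, 3, 5
dummy_pcs = numpy.random.random([nsamps, neofs])
pts_per_block = nsamps // nptiles
sorted_dummy = numpy.zeros_like(dummy_pcs)
for p in range(nptiles):
    s, e = p * pts_per_block, (p + 1) * pts_per_block
    order = numpy.argsort(dummy_pcs[s:e, 0])     # indices that sort the first PC
    sorted_dummy[s:e] = dummy_pcs[s:e][order]    # apply the same order to every PC
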
def load_HadISST_ref_data(rn):
    start = 1899
    end = 2010

    sst_ref_fname, sic_ref_fname = get_HadISST_monthly_ref_filenames(start, end, rn)
    sic_ref_data = load_data(sic_ref_fname, "sic")
    sst_ref_data = load_data(sst_ref_fname, "sst")
    
    return sst_ref_data, sic_ref_data
def load_HadISST_anom_data(rn):
    start = 1899
    end = 2010

    sst_anom_fname, sic_anom_fname = get_HadISST_monthly_anomaly_filenames(start, end, rn)
    sst_anom_data = load_data(sst_anom_fname, "sst")
    sic_anom_data = load_data(sic_anom_fname, "sic")
    mv = get_missing_value(sst_anom_fname, "sst")
    
    return sst_anom_data, sic_anom_data, mv
def create_Ma_syn_SST_PCs(run_type, ref_start, ref_end, eof_year, neofs, ptile, model_mean=False, monthly=False):
    # load the PCs, EOFs for this year
    pcs_fname = get_cmip5_PC_filename(run_type, ref_start, ref_end, eof_year, model_mean, monthly)
    pcs = load_data(pcs_fname, "sst")
    eof_fname = get_cmip5_EOF_filename(run_type, ref_start, ref_end, eof_year, model_mean, monthly)
    eofs = load_data(eof_fname, "sst")
    
    # load the smoothed ensemble mean
    ens_mean_fname = get_concat_anom_sst_ens_mean_smooth_fname(run_type, ref_start, ref_end, monthly)
    ens_mean = load_sst_data(ens_mean_fname, "sst")
    # we only need one ensemble mean - calculate decadal mean
    histo_sy, histo_ey, rcp_sy, rcp_ey = get_start_end_periods()
    ens_mean = ens_mean[eof_year-histo_sy]

    # transform pc data to R compatible format
    pcs = pcs.byteswap().newbyteorder()
    nsamps = 100
    nmons = pcs.shape[0]
    # create the return storage
    select_PCs = numpy.zeros([pcs.shape[0], nsamps, neofs+2], 'f')

    # now loop through each month's pcs - if yearly mean then there will only be one
    for m in range(0, nmons):
        # fit a copula to the principal components
        pc_mvdc = fit_mvdc(pcs[m], neofs)

        # generate a large sample of GMSSTs and their corresponding PCs
        sst_means_and_PCs = generate_Ma_large_sample_of_SSTs(pc_mvdc, eofs[m], ens_mean, neofs)

        # now sample the distribution to get nsamps number of PCs which
        # represent the distribution of GMSSTs
        select_PCs[m] = sample_Ma_SSTs(sst_means_and_PCs, neofs, nsamps, ptile)
    
    # sort the pcs based on the first pc for each of the percentiles
    sorted_select_PCs = numpy.zeros([nmons, 2, neofs], 'f')
    for m in range(0, nmons):
        # get the NA indices for this month
        na_idxs = select_PCs[m,:,1]
        # sort it and get the indices
        na_idxs_sort = numpy.argsort(na_idxs)
        # get the first and last in the list sorted by NA indices
        # - i.e. where the North Atlantic index is the most different
        # we just want the PCs now
        for e in range(0, neofs):
            sorted_select_PCs[m,0,e] = select_PCs[m,:,2+e][na_idxs_sort[0]]
            sorted_select_PCs[m,1,e] = select_PCs[m,:,2+e][na_idxs_sort[-1]]

    # we now have two sets of PCs - one at each end of the distribution of NA SST gradient for the desired percentile
    # save
    out_fname = get_Ma_syn_SST_PCs_filename(run_type, ref_start, ref_end, eof_year, ptile, monthly)
    # fix the missing value meta data
    out_attrs = {"missing_value" : 2e20}
    # save the selected PCs
    save_pcs(out_fname, sorted_select_PCs, out_attrs)
    print out_fname
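
# create_Ma_syn_SST_PCs keeps only the two samples whose North Atlantic index
# lies at either end of the sampled distribution; that selection is simply the
# first and last entries of an argsort. A tiny illustration with made-up values:
import numpy

na_index = numpy.array([0.3, -1.2, 0.9, 0.1, -0.4], 'f')
order = numpy.argsort(na_index)
lowest, highest = order[0], order[-1]   # samples with the smallest / largest index
# here lowest == 1 and highest == 2
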
Example #6
def calc_syn_GMSST_TS(run_type, ref_start, ref_end, monthly=True):
    out_name = "./" + run_type + "_gmsst_ts.nc"
    dates = numpy.array([1899 + float(x)/12 for x in range(0, 2424)], 'f')
    if not os.path.exists(out_name):
        # get the synthetic SST filename
        #
        ptiles = numpy.array([10,25,50,75,90], 'f')
        samples = numpy.array([0,4,9,14,19], 'f')
        samples_2 = numpy.arange(0, len(ptiles)*len(samples), dtype='i')
        print len(samples_2), samples_2.dtype
        ens_ts = numpy.zeros([2, len(ptiles)*len(samples), dates.shape[0]], 'f')
        c = 0
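        # note: neofs, eof_year and ivm are not defined in this function -
        # they are assumed to be module-level variables in the original source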
        for ptile in ptiles:
            for a in samples:
                syn_sst_fname = get_syn_sst_filename(run_type,ref_start,ref_end,neofs,eof_year,int(ptile),ivm,monthly)
                syn_sst_fname = syn_sst_fname[:-3] + "_s" + str(int(a)) + ".nc"
                sst_data = load_sst_data(syn_sst_fname, "sst")
                gmsst_nh = calc_GMSST(sst_data[:,:90,:],1)
                gmsst_sh = calc_GMSST(sst_data[:,90:,:],2)
                ens_ts[0,c] = gmsst_nh
                ens_ts[1,c] = gmsst_sh
                c += 1
        # save the timeseries
        save_ens_ts_file(out_name, ens_ts, samples_2, dates, "gmsst")
    else:
        ens_ts = load_data(out_name, "gmsst")
    
    return ens_ts, dates
Example #7
def calc_syn_SICEXT_TS(run_type, ref_start, ref_end, monthly=True):
    out_name = "./" + run_type + "_sicext_ts.nc"
    dates = numpy.array([1899 + float(x)/12 for x in range(0, 2424)], 'f')
    lats = numpy.array([90-x for x in range(0,180)], 'f')
    d_lon = 1.0
    mv = -1e30
    if not os.path.exists(out_name):
        ptiles = numpy.array([10,25,50,75,90], 'f')
        samples = numpy.array([0,4,9,14,19], 'f')
        samples_2 = numpy.arange(0, len(ptiles)*len(samples), dtype='i')
        print len(samples_2), samples_2.dtype
        ens_ts = numpy.zeros([2, len(ptiles)*len(samples), dates.shape[0]], 'f')
        # get the synthetic SIC filename
        #
        c = 0
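        # note: neofs, eof_year and ivm are not defined in this function -
        # they are assumed to be module-level variables in the original source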
        for ptile in ptiles:
            for a in samples:
                syn_sst_fname = get_syn_sst_filename(run_type,ref_start,ref_end,neofs,eof_year,int(ptile),ivm,monthly)
                syn_sst_fname = syn_sst_fname[:-3] + "_s" + str(int(a)) + ".nc"
                syn_sic_fname = syn_sst_fname.replace("ssts", "sic").replace("sst", "sic")
                sic_data = load_sst_data(syn_sic_fname, "sic")
                sic_ext_nh, sic_ext_sh = calc_sea_ice_extent(sic_data, lats, d_lon, mv)
                print sic_ext_nh.shape, ens_ts.shape
                ens_ts[0,c] = sic_ext_nh
                ens_ts[1,c] = sic_ext_sh
                c += 1
        # save the timeseries
        save_ens_ts_file(out_name, ens_ts, samples_2, dates, "sic")
    else:
        ens_ts = load_data(out_name, "sic")*1.11
    
    return ens_ts, dates
Example #8
def load_hadisst_data():
    rn = 400
    start = 1899
    end = 2010
    sst_fname, sic_fname = get_HadISST_monthly_anomaly_filenames(start, end, rn)
    sst_data = load_data(sst_fname, "sst")
    sic_data = load_data(sic_fname, "sic")
    mv = get_missing_value(sst_fname, "sst")
    
    sst_ref_fname, sic_ref_fname = get_HadISST_monthly_ref_filenames(start, end, rn)
    sic_ref_data = load_data(sic_ref_fname, "sic")
    sst_ref_data = load_data(sst_ref_fname, "sst")
    
    sic_hadisst_fname = get_HadISST_input_filename(rn)
    sic_hadisst = load_data(sic_hadisst_fname, "sic")
    
    return sst_data, sic_data, sic_ref_data, sst_ref_data, sic_hadisst, mv
Example #9
def load_hadisst_data():
    rn = 400
    start = 1899
    end = 2010
    sst_fname, sic_fname = get_HadISST_monthly_anomaly_filenames(
        start, end, rn)
    sst_data = load_data(sst_fname, "sst")
    sic_data = load_data(sic_fname, "sic")
    mv = get_missing_value(sst_fname, "sst")

    sst_ref_fname, sic_ref_fname = get_HadISST_monthly_ref_filenames(
        start, end, rn)
    sic_ref_data = load_data(sic_ref_fname, "sic")
    sst_ref_data = load_data(sst_ref_fname, "sst")

    sic_hadisst_fname = get_HadISST_input_filename(rn)
    sic_hadisst = load_data(sic_hadisst_fname, "sic")

    return sst_data, sic_data, sic_ref_data, sst_ref_data, sic_hadisst, mv
Example #10
def load_CMIP5_anom_data(run_type):
    # load the HadISST file - get the name from the run number
    ref_start = 1986
    ref_end = 2005

    # get the filenames
    cmip5_sic_arctic_anom_name = get_CMIP5_ens_mean_anom_filename(
        run_type, ref_start, ref_end, "sic", "arctic")
    cmip5_tos_arctic_anom_name = get_CMIP5_ens_mean_anom_filename(
        run_type, ref_start, ref_end, "tos", "arctic")

    cmip5_sic_antarctic_anom_name = get_CMIP5_ens_mean_anom_filename(
        run_type, ref_start, ref_end, "sic", "antarctic")
    cmip5_tos_antarctic_anom_name = get_CMIP5_ens_mean_anom_filename(
        run_type, ref_start, ref_end, "tos", "antarctic")

    # load the data
    cmip5_sic_arctic_anoms = load_data(cmip5_sic_arctic_anom_name, "sic")
    cmip5_tos_arctic_anoms = load_data(cmip5_tos_arctic_anom_name, "tos")
    time = load_data(cmip5_tos_arctic_anom_name, "time")

    cmip5_sic_antarctic_anoms = load_data(cmip5_sic_antarctic_anom_name, "sic")
    cmip5_tos_antarctic_anoms = load_data(cmip5_tos_antarctic_anom_name, "tos")

    # get the missing value
    mv = get_missing_value(cmip5_tos_arctic_anom_name, "tos")

    # amalgamate the data into one array, splitting at the equator and copying the
    # arctic data into the NH and the antarctic data into the SH
    ant_s = cmip5_sic_arctic_anoms.shape[1] / 2

    cmip5_sic_arctic_anoms[:, ant_s:, :] = cmip5_sic_antarctic_anoms[:,
                                                                     ant_s:, :]
    cmip5_tos_arctic_anoms[:, ant_s:, :] = cmip5_tos_antarctic_anoms[:,
                                                                     ant_s:, :]

    # trim the first 4 years - we only want 2010 to 2100
    #    S = 12 * (2010-2006)
    S = 0
    cmip5_tos_anom_data = cmip5_tos_arctic_anoms[S:]
    cmip5_sic_anom_data = cmip5_sic_arctic_anoms[S:]

    return cmip5_tos_anom_data, cmip5_sic_anom_data, mv
def create_cmip5_rcp_anomalies(run_type, ref_start, ref_end, eof_year, percentile, monthly=True):
    # create the time series of anomalies from the mean of the various 
    # samples in the CMIP5 ensemble
    # This spans the uncertainty of the GMT response to GHG forcing in CMIP5 

    if run_type == "likely":
        load_run_type = "rcp45"
    else:
        load_run_type = run_type

    # load the eof patterns in the eof_year
    eof_fname = get_cmip5_EOF_filename(load_run_type, ref_start, ref_end, eof_year, monthly=monthly)
    eofs = load_sst_data(eof_fname, "sst")
    
    # load the principal components for the eof_year
    syn_pc_fname  = get_syn_SST_PCs_filename(load_run_type, ref_start, ref_end, eof_year, monthly=monthly)
    syn_pc_fname_new = syn_pc_fname[:-3] + "_new.nc"
    syn_pc = load_data(syn_pc_fname_new, "sst")
    
    # load the timeseries of scalings and offsets to the pcs over the CMIP5 period
    proj_pc_scale_fname = get_cmip5_proj_PC_scale_filename(load_run_type, ref_start, ref_end, eof_year, monthly=monthly)
    proj_pc_scale  = load_data(proj_pc_scale_fname, "sst_scale")
    proj_pc_offset = load_data(proj_pc_scale_fname, "sst_offset")
    
    # corresponding weights that we supplied to the EOF function
    coslat = numpy.cos(numpy.deg2rad(numpy.arange(89.5, -90.5,-1.0))).clip(0., 1.)
    wgts = numpy.sqrt(coslat)[..., numpy.newaxis]

    # create the timeseries of reconstructed SSTs for just this sample
    # recreate the field - month by month if necessary
    if monthly:
        syn_sst_rcp = numpy.ma.zeros([proj_pc_scale.shape[0], eofs.shape[2], eofs.shape[3]], 'f')
        for m in range(0, 12):
            pc_ts = syn_pc[m,percentile,:neofs] * proj_pc_scale[m::12,:neofs] + proj_pc_offset[m::12,:neofs]
            syn_sst_rcp[m::12] = reconstruct_field(pc_ts, eofs[m], neofs, wgts)
    else:
        pc_ts = syn_pc[0,percentile,:neofs] * proj_pc_scale[:,:neofs] + proj_pc_offset[:,:neofs]
        syn_sst_rcp = reconstruct_field(pc_ts, eofs[0], neofs, wgts)
    return syn_sst_rcp
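
# reconstruct_field is called throughout these examples but not shown. As a rough
# sketch of the kind of operation it is assumed to perform (not the original
# implementation): project each time step's principal components back onto the
# EOF patterns and undo the cos-latitude weighting applied before the EOF analysis.
import numpy

def reconstruct_field_sketch(pc_ts, eofs, neofs, wgts):
    # pc_ts: [time, neofs] principal components
    # eofs : [neofs, nlat, nlon] EOF patterns
    # wgts : [nlat, 1] sqrt(cos(lat)) weights used in the EOF analysis
    field = numpy.einsum('tk,kij->tij', pc_ts[:, :neofs], eofs[:neofs])
    return field / wgts     # broadcasts over time and longitude
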
def load_CMIP5_anom_data(run_type):
    # load the HadISST file - get the name from the run number
    ref_start = 1986
    ref_end = 2005
    
    # get the filenames
    cmip5_sic_arctic_anom_name = get_CMIP5_ens_mean_anom_filename(run_type, ref_start, ref_end, "sic", "arctic")
    cmip5_tos_arctic_anom_name = get_CMIP5_ens_mean_anom_filename(run_type, ref_start, ref_end, "tos", "arctic")
    
    cmip5_sic_antarctic_anom_name = get_CMIP5_ens_mean_anom_filename(run_type, ref_start, ref_end, "sic", "antarctic")
    cmip5_tos_antarctic_anom_name = get_CMIP5_ens_mean_anom_filename(run_type, ref_start, ref_end, "tos", "antarctic")
    
    # load the data
    cmip5_sic_arctic_anoms = load_data(cmip5_sic_arctic_anom_name, "sic")
    cmip5_tos_arctic_anoms = load_data(cmip5_tos_arctic_anom_name, "tos")
    time = load_data(cmip5_tos_arctic_anom_name, "time")

    cmip5_sic_antarctic_anoms = load_data(cmip5_sic_antarctic_anom_name, "sic")
    cmip5_tos_antarctic_anoms = load_data(cmip5_tos_antarctic_anom_name, "tos")
    
    # get the missing value
    mv = get_missing_value(cmip5_tos_arctic_anom_name, "tos")
    
    # amalgamate the data into one array, splitting at the equator and copying the
    # arctic data into the NH and the antarctic data into the SH
    ant_s = cmip5_sic_arctic_anoms.shape[1] / 2

    cmip5_sic_arctic_anoms[:,ant_s:,:] = cmip5_sic_antarctic_anoms[:,ant_s:,:]
    cmip5_tos_arctic_anoms[:,ant_s:,:] = cmip5_tos_antarctic_anoms[:,ant_s:,:]
    
    # trim the first 4 years - we only want 2010 to 2100
#    S = 12 * (2010-2006)
    S=0
    cmip5_tos_anom_data = cmip5_tos_arctic_anoms[S:]
    cmip5_sic_anom_data = cmip5_sic_arctic_anoms[S:]
    
    return cmip5_tos_anom_data, cmip5_sic_anom_data, mv
def plot_test_residuals(histo_sy, histo_ey, ref_start, ref_end, run_n):
    # load the yearly eofs and pcs
    yr_eof_fname = get_HadISST_residual_EOFs_fname(histo_sy, histo_ey, run_n)
    yr_eofs = load_sst_data(yr_eof_fname, "sst")
    yr_pcs_fname = get_HadISST_residual_PCs_fname(histo_sy, histo_ey, run_n)
    yr_pcs = load_data(yr_pcs_fname)
    
    # load the monthly eofs and pcs
    mn_eof_fname = get_HadISST_monthly_residual_EOFs_fname(histo_sy, histo_ey, run_n)
    mn_eofs = load_sst_data(mn_eof_fname, "sst")
    mn_pcs_fname = get_HadISST_monthly_residual_PCs_fname(histo_sy, histo_ey, run_n)
    mn_pcs = load_data(mn_pcs_fname)
    
    # load the smoothed hadisst data (smooth_gmsst is plotted below)
    smooth_fname  = get_HadISST_smooth_fname(histo_sy, histo_ey, run_n)
    smooth_hadisst = load_sst_data(smooth_fname, "sst")
    smooth_gmsst = calc_GMSST(smooth_hadisst)
    smooth_gmsst = smooth_gmsst - numpy.mean(smooth_gmsst[1986-1899:2006-1899])
    
    # reconstruct the fields
    yr_resids = reconstruct_field(yr_pcs, yr_eofs, 20)
    mn_resids = reconstruct_field(mn_pcs, mn_eofs, 20)
        
    # calculate the gmsst
    yr_gmsst = calc_GMSST(yr_resids)
    mn_gmsst = calc_GMSST(mn_resids)
    
    # plot them
    yr_t = numpy.arange(1899,2011,1)
    mn_t = numpy.arange(1899,2011,1.0/12)
    
    sp = plt.subplot(111)
    sp.plot(yr_t, yr_gmsst, 'r', zorder=1)
    sp.plot(mn_t, mn_gmsst, 'k', zorder=0)
    sp.plot(yr_t, smooth_gmsst[:-1], 'b', lw=2.0)
    
    plt.savefig("hadisst_resids.pdf")
Example #14
def calc_CMIP5_PC_proj_scaling(run_type, ref_start, ref_end, eof_year, model_mean=False, monthly=False):
    # load the previously calculated PCs
    pc_fname = get_cmip5_proj_PC_filename(run_type, ref_start, ref_end, eof_year, model_mean, monthly)
    pcs = load_data(pc_fname, "sst_pc")
    fh = netcdf_file(pc_fname, 'r')
    t_var = fh.variables["time"]
    
    # get the anomalies in the decade centred on the eof_year (-5/+4 inc.)
    histo_sy, histo_ey, rcp_sy, rcp_ey = get_start_end_periods()
    # calculate the start and end index into the netCDF files
    ri = eof_year-histo_sy
    if monthly:
        ri *= 12
    
    # For each year regress the pcs on the pcs in the eof_year to determine the
    # relationship between the eof_year and the years in the time series
    npcs  = pcs.shape[2]
    n_t   = pcs.shape[1]
    offset = numpy.zeros([n_t, npcs], 'f')
    scale  = numpy.zeros([n_t, npcs], 'f')
    
    for t in range(0, n_t):
        tts_pcs = pcs[:,t,:].squeeze()
        # which month are we in?
        if monthly:
            mon = t % 12
        else:
            ref_pcs = pcs[:,ri,:].squeeze()
        for pc in range(0, npcs):
            # get the reference pcs in the eof_year - if monthly we want to get 12 
            # reference pcs
            if monthly:
                ref_pcs = pcs[:,ri+mon,:].squeeze()
            s, i, r, p, err = scipy.stats.linregress(ref_pcs[:,pc], tts_pcs[:,pc])
            scale [t,pc] = s
            offset[t,pc] = i
    
    # save the scalings
    out_name = get_cmip5_proj_PC_scale_filename(run_type, ref_start, ref_end, eof_year, model_mean, monthly)
    save_pcs_scale(out_name, offset, scale, t_var)
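
# The regression above ties each PC at every time step to the same PC in the
# eof_year, so a synthetic PC drawn for the eof_year can be projected to any
# other time as pc(t) ~ scale(t) * pc(eof_year) + offset(t). A short
# demonstration of that relationship with fabricated ensemble values:
import numpy
import scipy.stats

nmodels = 30
ref_pc = numpy.random.randn(nmodels)        # PC 1 of each model in the eof_year
year_pc = 0.8 * ref_pc + 0.1 + 0.05 * numpy.random.randn(nmodels)
s, i, r, p, err = scipy.stats.linregress(ref_pc, year_pc)
# s recovers ~0.8 and i recovers ~0.1, so the synthetic eof_year PC maps to this
# time step as s * pc + i, which is how scale and offset are used elsewhere
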
def create_SIC_from_mapping_file(input, output, hadisst_fit_fname, cmip5_fit_fname, rn):
    # load each mapping filename
    hadisst_mapping, mv = load_mapping(hadisst_fit_fname)
    cmip5_mapping, mv2  = load_mapping(cmip5_fit_fname)

    # load the hadisst monthly reference values
    sst_ref_fname, sic_ref_fname = get_HadISST_monthly_ref_filenames(1899, 2010, rn)
    sst_ref_fname = get_HadISST_monthly_reference_fname(1899, 2010, 1986, 2005, rn)
    sst_ref_data = load_data(sst_ref_fname, "sst")
    sic_ref_data = load_data(sic_ref_fname, "sic")
    lon_var, lat_var, time_var = get_syn_SST_lon_lat_time_vars(input)

    # concatenate (along the t-axis) the two mapping files
    all_mapping = numpy.concatenate((hadisst_mapping, cmip5_mapping), axis=0)
    
    # load the sst file in
    sst_input = load_data(input, "sst")
    
    # truncate to 200 years worth of mapping data
    sub_mapping = all_mapping[5:]
    sub_sst = sst_input[:]
       
    # reconstruct
    print "Constructing sea-ice"
    # remove the sst reference to produce the anomalies
    n_rpts = sub_sst.shape[0] / sst_ref_data.shape[0]
    sub_sst_anoms = sub_sst - numpy.tile(sst_ref_data, [n_rpts,1,1])

    # create the SIC from the SST
    syn_sic_anoms = calc_sic_from_sst(sub_sst_anoms, sub_mapping, mv, sub_mapping.shape[1]-1)
    # add the reference back on
    n_rpts = syn_sic_anoms.shape[0] / sic_ref_data.shape[0]
    syn_sic = syn_sic_anoms + numpy.tile(sic_ref_data, [n_rpts,1,1])
    syn_sic[syn_sic < -1] = mv
    # fix range        
    #
    print "Filling ice holes"
    sic_filled = fill_ice(syn_sic, mv)
    
    print "Removing isolated ice"
    sic_removed = remove_isolated_ice(sic_filled, mv)

    # smooth the ice with a 3x1 smoothing window
    weights = numpy.ones([1,3,1], 'f')
    print "Smoothing"
    sic_removed = syn_sic
    sic_smooth = window_smooth_3D(sic_removed, weights, mv, smooth_zero=True)
    sic_smooth[(sic_smooth > 1.0) & (sic_smooth != mv)] = 1.0
    for m in [0,1,2,3,4,10,11]:
        syn_sic[m::12][syn_sic[m::12] != mv] = numpy.abs(syn_sic[m::12][syn_sic[m::12] != mv])
    sic_smooth[(sic_smooth < 0.0) & (sic_smooth != mv)] = 0.0

    # remove the block of sea ice in the Baltic sea caused by using the
    # 1986->2005 mean in months 04 to 11
    for m in range(4,12):
        sic_smooth[m::12,28:31,210:213] = 0.0
        
    # restore the LSM
    sic_smooth[sub_sst==mv] = mv
    # save the output
    save_sic(output, sic_smooth, lon_var, lat_var, time_var, mv)
    print output
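
# Both SIC-construction routines expand a 12-month reference climatology to the
# full length of the time series with numpy.tile before subtracting or adding it.
# A minimal sketch of that pattern with invented shapes:
import numpy

nyears, nlat, nlon = 3, 4, 8
ref = numpy.random.random([12, nlat, nlon])             # monthly reference climatology
full = numpy.random.random([12 * nyears, nlat, nlon])   # monthly time series
anoms = full - numpy.tile(ref, [nyears, 1, 1])          # anomalies w.r.t. the reference
restored = anoms + numpy.tile(ref, [nyears, 1, 1])      # adding it back recovers 'full'
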
def create_syn_SST_PCs(run_type,
                       ref_start,
                       ref_end,
                       eof_year,
                       neofs,
                       nsamps,
                       model_mean=False,
                       monthly=False):
    # load the PCs, EOFs for this year
    pcs_fname = get_cmip5_PC_filename(run_type, ref_start, ref_end, eof_year,
                                      model_mean, monthly)
    pcs = load_data(pcs_fname, "sst")
    eof_fname = get_cmip5_EOF_filename(run_type, ref_start, ref_end, eof_year,
                                       model_mean, monthly)
    eofs = load_data(eof_fname, "sst")

    # load the smoothed ensemble mean
    ens_mean_fname = get_concat_anom_sst_ens_mean_smooth_fname(
        run_type, ref_start, ref_end, monthly)
    ens_mean = load_sst_data(ens_mean_fname, "sst")
    # we only need one ensemble mean - calculate decadal mean
    histo_sy, histo_ey, rcp_sy, rcp_ey = get_start_end_periods()
    ens_mean = ens_mean[eof_year - histo_sy]

    # transform pc data to R compatible format
    pcs = pcs.byteswap().newbyteorder()
    # create the return storage
    select_PCs = numpy.zeros([pcs.shape[0], nsamps, neofs], 'f')
    # percentile ranges
    ptiles = [0.10, 0.25, 0.50, 0.75, 0.90]
    select_PCs = numpy.random.random(select_PCs.shape)

    # now loop through each month's pcs - if yearly mean then there will only be one
    for m in range(0, pcs.shape[0]):
        # fit a copula to the principal components
        pc_mvdc = fit_mvdc(pcs[m], neofs)

        # generate a large sample of GMSSTs and their corresponding PCs
        sst_means_and_PCs = generate_large_sample_of_SSTs(
            pc_mvdc, eofs[m], ens_mean, neofs)

        # now sample the distribution to get nsamps number of PCs which
        # represent the distribution of GMSSTs
        select_PCs[m] = sample_SSTs(sst_means_and_PCs, neofs, nsamps, ptiles)

    # sort the pcs based on the first pc for each of the percentiles
    sorted_select_PCs = numpy.zeros([pcs.shape[0], nsamps, neofs], 'f')
    pts_per_pc = int(nsamps / len(ptiles))
    for m in range(0, pcs.shape[0]):
        for p in range(0, len(ptiles)):
            s = p * pts_per_pc
            e = (p + 1) * pts_per_pc
            # get the first pc for this ptile
            pc0 = select_PCs[m, s:e, 0]
            # sort it and get the indices
            pc0_sort = numpy.argsort(pc0)
            # sort all the pcs so that the corresponding pc0 is ascending
            for f in range(0, neofs):
                pc1 = select_PCs[m, s:e, f]
                sorted_select_PCs[m, s:e, f] = pc1[pc0_sort]

    # save
    out_fname = get_syn_SST_PCs_filename(run_type, ref_start, ref_end,
                                         eof_year, monthly)
    out_fname = out_fname[:-3] + "_new.nc"
    # fix the missing value meta data
    out_attrs = {"missing_value": 2e20}
    # save the selected PCs
    save_pcs(out_fname, sorted_select_PCs, out_attrs)
    print out_fname
Example #17
def create_HAPPI_SIC(input, output, cmip5_fit_fname, sy, ey, rn):
    # load each mapping filename
    cmip5_mapping, mv = load_mapping(cmip5_fit_fname)

    # load the hadisst monthly reference values
    sst_ref_fname, sic_ref_fname = get_HadISST_monthly_ref_filenames(
        1899, 2010, rn)
    sic_ref_data = load_data(sic_ref_fname, "sic")
    sst_ref_fname = get_HadISST_monthly_reference_fname(
        1899, 2010, 1986, 2005, rn)
    sst_ref_data = load_data(sst_ref_fname, "sst")
    lon_var, lat_var, time_var = get_syn_SST_lon_lat_time_vars(input)

    # load the sst file in
    sst_input = load_data(input, "tos")
    # sst data is in different order in file
    lenX2 = sst_input.shape[2] / 2
    sst_right = numpy.array(sst_input[:, :, lenX2:])
    sst_left = numpy.array(sst_input[:, :, :lenX2])
    # recombine
    sst_input[:, :, :lenX2] = sst_right
    sst_input[:, :, lenX2:] = sst_left

    sst_input[sst_input > 1000] = mv

    # truncate to sy->ey years worth of mapping data
    ref_yr = 2015
    rcp_offset = -10  # fudge to use RCP2.6 with RCP4.5 data
    sub_mapping = cmip5_mapping[(sy - rcp_offset - ref_yr) /
                                10:(ey - rcp_offset - ref_yr) / 10]

    # reconstruct
    print "Constructing sea-ice"
    # remove the sst reference to produce the anomalies
    n_rpts = sst_input.shape[0] / sst_ref_data.shape[0]
    sub_sst_anoms = sst_input - numpy.tile(sst_ref_data, [n_rpts, 1, 1])

    # create the SIC from the SST
    syn_sic_anoms = calc_sic_from_sst(sub_sst_anoms, sub_mapping, mv,
                                      sub_mapping.shape[1] - 1)
    # add the reference back on
    #    n_rpts = syn_sic_anoms.shape[0] / sic_ref_data.shape[0]
    #    syn_sic = syn_sic_anoms + numpy.tile(sic_ref_data, [n_rpts,1,1])
    syn_sic = syn_sic_anoms
    # apply a sigmoid, write out with multiple widths (0,6,12)
    W = 0
    if (W != 0):
        syn_sic = 1.0 / (1.0 + numpy.exp(-W * (syn_sic - 0.5)))

    # restore the LSM
    syn_sic[sst_input < -1000] = mv

    print "Filling ice holes"
    sic_filled = fill_ice(syn_sic, mv)

    print "Removing isolated ice"
    sic_removed = remove_isolated_ice(sic_filled, mv)

    # smooth the ice with a 3x1 smoothing window
    weights = numpy.ones([1, 3, 1], 'f')
    print "Smoothing"
    sic_removed[sst_input < -1000] = mv
    sic_smooth = window_smooth_3D(sic_removed, weights, mv, smooth_zero=False)
    sic_smooth[(sic_smooth > 1.0) & (sic_smooth != mv)] = 1.0
    for m in [0, 1, 2, 3, 4, 10, 11]:
        sic_smooth[m::12][sic_smooth[m::12] != mv] = numpy.abs(
            sic_smooth[m::12][sic_smooth[m::12] != mv])

    # remove the block of sea ice in the Baltic sea caused by using the
    # 1986->2005 mean in months 04 to 11
    for m in range(4, 12):
        sic_smooth[m::12, 28:31, 210:213] = 0.0

    if ey == 2101:
        # create 2101
        sic_smooth[-24:-12] = sic_smooth[-36:-24]
        sic_smooth[-12:] = sic_smooth[-36:-24]

    # clamp to 0.0 to 1.0
    sic_smooth[sic_smooth > 1.0] = 1.0
    sic_smooth[(sic_smooth < 0.0) & (sic_smooth != mv)] = 0.0
    # restore the LSM
    sic_smooth[sst_input < -1000] = mv
    # switch the values back
    sic_right = numpy.array(sic_smooth[:, :, lenX2:])
    sic_left = numpy.array(sic_smooth[:, :, :lenX2])
    # recombine
    sic_smooth[:, :, :lenX2] = sic_right
    sic_smooth[:, :, lenX2:] = sic_left

    # save the output
    save_sic(output, sic_smooth, lon_var, lat_var, time_var, mv)
    print output
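
# The HAPPI SSTs are stored with a different longitude origin, so the two halves
# of the longitude axis are swapped before processing and swapped back afterwards.
# For an even-length axis this is equivalent to rolling by half the width, as the
# short check below shows (the array contents are arbitrary):
import numpy

data = numpy.arange(2 * 3 * 8, dtype='f').reshape([2, 3, 8])
lenX2 = data.shape[2] // 2
swapped = numpy.empty_like(data)
swapped[:, :, :lenX2] = data[:, :, lenX2:]   # right half moves to the left
swapped[:, :, lenX2:] = data[:, :, :lenX2]   # left half moves to the right
assert numpy.array_equal(swapped, numpy.roll(data, lenX2, axis=2))
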
Example #18
    fh = netcdf_file(sst_fname)
    lon_var = fh.variables["longitude"]
    lat_var = fh.variables["latitude"]
    time_var = fh.variables["time"]
    return lon_var, lat_var, time_var


########################################################################################

if __name__ == "__main__":
    # these are anomalies
    sst_data, sic_data, sic_ref, sst_ref, sic_hadisst, mv = load_hadisst_data()

    # this is the full hadisst data
    hadisst_sst_fname = get_HadISST_input_filename(400)
    sst_hadisst = load_data(hadisst_sst_fname, "sst")

    lon_var, lat_var, time_var = get_HadISST_lon_lat_time_vars()

    # subset to 1850->2010
    had_sic_data = sic_hadisst[:]
    had_sst_data = sst_hadisst[:]
    S = 0
    E = sst_hadisst.shape[0]

    years = [[y - 5, y + 5] for y in range(1850, 2010, 10)]

    #    mapping = calc_sic_mapping(sst_data, sic_data, mv)
    #    save_mapping("test_map.nc", mapping, lat_var, lon_var, years, mv)

    # calculate the anomaly
def create_HAPPI_SIC(input, output, cmip5_fit_fname, sy, ey, rn):
    # load each mapping filename
    cmip5_mapping, mv = load_mapping(cmip5_fit_fname)

    # load the hadisst monthly reference values
    sst_ref_fname, sic_ref_fname = get_HadISST_monthly_ref_filenames(1899, 2010, rn)
    sic_ref_data = load_data(sic_ref_fname, "sic")
    sst_ref_fname = get_HadISST_monthly_reference_fname(1899, 2010, 1986, 2005, rn)
    sst_ref_data = load_data(sst_ref_fname, "sst")
    lon_var, lat_var, time_var = get_syn_SST_lon_lat_time_vars(input)
    
    # load the sst file in
    sst_input = load_data(input, "tos")
    # sst data is in different order in file
    lenX2 = sst_input.shape[2] / 2
    sst_right = numpy.array(sst_input[:,:,lenX2:])
    sst_left  = numpy.array(sst_input[:,:,:lenX2])
    # recombine
    sst_input[:,:,:lenX2] = sst_right
    sst_input[:,:,lenX2:] = sst_left
    
    sst_input[sst_input > 1000] = mv
    
    # truncate to sy->ey years worth of mapping data
    ref_yr = 2015
    rcp_offset = -10 # fudge to use RCP2.6 with RCP4.5 data
    sub_mapping = cmip5_mapping[(sy-rcp_offset-ref_yr)/10:(ey-rcp_offset-ref_yr)/10]
       
    # reconstruct
    print "Constructing sea-ice"
    # remove the sst reference to produce the anomalies
    n_rpts = sst_input.shape[0] / sst_ref_data.shape[0]
    sub_sst_anoms = sst_input - numpy.tile(sst_ref_data, [n_rpts,1,1])

    # create the SIC from the SST
    syn_sic_anoms = calc_sic_from_sst(sub_sst_anoms, sub_mapping, mv, sub_mapping.shape[1]-1)
    # add the reference back on
#    n_rpts = syn_sic_anoms.shape[0] / sic_ref_data.shape[0]
#    syn_sic = syn_sic_anoms + numpy.tile(sic_ref_data, [n_rpts,1,1])
    syn_sic = syn_sic_anoms
    # apply a sigmoid, write out with multiple widths (0,6,12)
    W=0
    if (W != 0):
        syn_sic = 1.0 / (1.0 + numpy.exp(-W*(syn_sic-0.5)))

    # restore the LSM
    syn_sic[sst_input<-1000] = mv
    
    print "Filling ice holes"
    sic_filled = fill_ice(syn_sic, mv)
    
    print "Removing isolated ice"
    sic_removed = remove_isolated_ice(sic_filled, mv)

    # smooth the ice with a 3x1 smoothing window
    weights = numpy.ones([1,3,1], 'f')
    print "Smoothing"
    sic_removed[sst_input<-1000] = mv
    sic_smooth = window_smooth_3D(sic_removed, weights, mv, smooth_zero=False)
    sic_smooth[(sic_smooth > 1.0) & (sic_smooth != mv)] = 1.0
    for m in [0,1,2,3,4,10,11]:
        sic_smooth[m::12][sic_smooth[m::12] != mv] = numpy.abs(sic_smooth[m::12][sic_smooth[m::12] != mv])

    # remove the block of sea ice in the Baltic sea caused by using the
    # 1986->2005 mean in months 04 to 11
    for m in range(4,12):
        sic_smooth[m::12,28:31,210:213] = 0.0
    
    if ey == 2101:
        # create 2101
        sic_smooth[-24:-12] = sic_smooth[-36:-24]
        sic_smooth[-12:] = sic_smooth[-36:-24]
    
    # clamp to 0.0 to 1.0
    sic_smooth[sic_smooth > 1.0] = 1.0
    sic_smooth[(sic_smooth < 0.0) & (sic_smooth != mv)] = 0.0
    # restore the LSM
    sic_smooth[sst_input<-1000] = mv
    # switch the values back
    sic_right = numpy.array(sic_smooth[:,:,lenX2:])
    sic_left  = numpy.array(sic_smooth[:,:,:lenX2])
    # recombine
    sic_smooth[:,:,:lenX2] = sic_right
    sic_smooth[:,:,lenX2:] = sic_left

    # save the output
    save_sic(output, sic_smooth, lon_var, lat_var, time_var, mv)
    print output
Example #20
def create_Ma_syn_SST_PCs(run_type,
                          ref_start,
                          ref_end,
                          eof_year,
                          neofs,
                          ptile,
                          model_mean=False,
                          monthly=False):
    # load the PCs, EOFs for this year
    pcs_fname = get_cmip5_PC_filename(run_type, ref_start, ref_end, eof_year,
                                      model_mean, monthly)
    pcs = load_data(pcs_fname, "sst")
    eof_fname = get_cmip5_EOF_filename(run_type, ref_start, ref_end, eof_year,
                                       model_mean, monthly)
    eofs = load_data(eof_fname, "sst")

    # load the smoothed ensemble mean
    ens_mean_fname = get_concat_anom_sst_ens_mean_smooth_fname(
        run_type, ref_start, ref_end, monthly)
    ens_mean = load_sst_data(ens_mean_fname, "sst")
    # we only need one ensemble mean - calculate decadal mean
    histo_sy, histo_ey, rcp_sy, rcp_ey = get_start_end_periods()
    ens_mean = ens_mean[eof_year - histo_sy]

    # transform pc data to R compatible format
    pcs = pcs.byteswap().newbyteorder()
    nsamps = 100
    nmons = pcs.shape[0]
    # create the return storage
    select_PCs = numpy.zeros([pcs.shape[0], nsamps, neofs + 2], 'f')

    # now loop through each month's pcs - if yearly mean then there will only be one
    for m in range(0, nmons):
        # fit a copula to the principal components
        pc_mvdc = fit_mvdc(pcs[m], neofs)

        # generate a large sample of GMSSTs and their corresponding PCs
        sst_means_and_PCs = generate_Ma_large_sample_of_SSTs(
            pc_mvdc, eofs[m], ens_mean, neofs)

        # now sample the distribution to get nsamps number of PCs which
        # represent the distribution of GMSSTs
        select_PCs[m] = sample_Ma_SSTs(sst_means_and_PCs, neofs, nsamps, ptile)

    # sort the pcs based on the first pc for each of the percentiles
    sorted_select_PCs = numpy.zeros([nmons, 2, neofs], 'f')
    for m in range(0, nmons):
        # get the NA indices for this month
        na_idxs = select_PCs[m, :, 1]
        # sort it and get the indices
        na_idxs_sort = numpy.argsort(na_idxs)
        # get the first and last in the list sorted by NA indices
        # - i.e. where the North Atlantic index is the most different
        # we just want the PCs now
        for e in range(0, neofs):
            sorted_select_PCs[m, 0, e] = select_PCs[m, :,
                                                    2 + e][na_idxs_sort[0]]
            sorted_select_PCs[m, 1, e] = select_PCs[m, :,
                                                    2 + e][na_idxs_sort[-1]]

    # we now have two sets of PCs - one at each end of the distribution of NA SST gradient for the desired percentile
    # save
    out_fname = get_Ma_syn_SST_PCs_filename(run_type, ref_start, ref_end,
                                            eof_year, ptile, monthly)
    # fix the missing value meta data
    out_attrs = {"missing_value": 2e20}
    # save the selected PCs
    save_pcs(out_fname, sorted_select_PCs, out_attrs)
    print out_fname
Example #21
def create_Ma_syn_SSTs(run_type, ref_start, ref_end, sy, ey, eof_year, neofs,
                       ptile, monthly):

    # determine which hadisst ensemble member to use
    hadisst_ens_members = [
        1059, 115, 1169, 1194, 1346, 137, 1466, 396, 400, 69
    ]
    run_n = hadisst_ens_members[numpy.random.randint(0,
                                                     len(hadisst_ens_members))]

    # load the CMIP5 ensemble mean timeseries
    # load the ensemble mean of the anomalies
    cmip5_ens_mean_anoms_fname = get_concat_anom_sst_ens_mean_smooth_fname(
        run_type, ref_start, ref_end, monthly)
    cmip5_ens_mean_anoms = load_sst_data(cmip5_ens_mean_anoms_fname, "sst")

    # load the eof patterns in the eof_year
    eof_fname = get_cmip5_EOF_filename(run_type,
                                       ref_start,
                                       ref_end,
                                       eof_year,
                                       monthly=True)
    eofs = load_sst_data(eof_fname, "sst")

    # load the principal components for the eof_year
    syn_pc_fname = get_Ma_syn_SST_PCs_filename(run_type,
                                               ref_start,
                                               ref_end,
                                               eof_year,
                                               ptile,
                                               monthly=True)
    syn_pc = load_data(syn_pc_fname, "sst")

    # load the timeseries of scalings and offsets to the pcs over the CMIP5 period
    proj_pc_scale_fname = get_cmip5_proj_PC_scale_filename(run_type,
                                                           ref_start,
                                                           ref_end,
                                                           eof_year,
                                                           monthly=True)
    proj_pc_scale = load_data(proj_pc_scale_fname, "sst_scale")
    proj_pc_offset = load_data(proj_pc_scale_fname, "sst_offset")

    # corresponding weights that we supplied to the EOF function
    coslat = numpy.cos(numpy.deg2rad(numpy.arange(89.5, -90.5,
                                                  -1.0))).clip(0., 1.)
    wgts = numpy.sqrt(coslat)[..., numpy.newaxis]

    # create the timeseries of reconstructed SSTs for just this sample
    # recreate the field - month by month
    # pattern number
    pn = 0

    nmons = 12
    # subset the mean anomalies and the proj_pc_scale and offset
    cmip5_sy = 1899
    si = (sy - cmip5_sy) * 12
    ei = (ey - cmip5_sy) * 12
    cmip5_ens_mean_anoms = cmip5_ens_mean_anoms[si:ei]

    if ey == 2101:
        # create 2101
        S = cmip5_ens_mean_anoms.shape
        cmip5_ens_mean_anoms2 = numpy.zeros([S[0] + 12, S[1], S[2]], 'f')
        cmip5_ens_mean_anoms2[:S[0]] = cmip5_ens_mean_anoms
        cmip5_ens_mean_anoms2[-12:] = cmip5_ens_mean_anoms[-12:]
        cmip5_ens_mean_anoms = cmip5_ens_mean_anoms2

    proj_pc_scale = proj_pc_scale[si - 12:ei]
    proj_pc_offset = proj_pc_offset[si - 12:ei]
    syn_sst_rcp = numpy.ma.zeros(
        [proj_pc_scale.shape[0], eofs.shape[2], eofs.shape[3]], 'f')
    #
    for pn in range(0, 2):  # two patterns per percentile
        for m in range(0, nmons):
            pc_ts = syn_pc[m, pn, :neofs] * proj_pc_scale[
                m::12, :neofs] + proj_pc_offset[m::12, :neofs]
            syn_sst_rcp[m::12] = reconstruct_field(pc_ts, eofs[m], neofs, wgts)

        # load the hadisst reference
        n_repeats = cmip5_ens_mean_anoms.shape[
            0] / 12  # number of repeats = number of years
        hadisst_ac = create_hadisst_monthly_reference(run_type, ref_start,
                                                      ref_end, n_repeats,
                                                      run_n)
        # load the internal variability - we are only interested in the 30 year observed ones
        resid_fname = get_HadISST_monthly_residuals_fname(1899, 2010, 400)
        intvar = load_data(resid_fname, "sst")
        intvar = intvar[(1973 - 1899) * 12:(2007 - 1899) * 12]
        print "cmip5_ens_mean_anoms ", cmip5_ens_mean_anoms.shape
        print "syn_sst_rcp ", syn_sst_rcp.shape
        print "hadisst_ac ", hadisst_ac.shape
        print "intvar ", intvar.shape
        out_data = cmip5_ens_mean_anoms + syn_sst_rcp + hadisst_ac + intvar
        # save the synthetic ssts
        save_Ma_syn_SSTs(out_data, run_type, ref_start, ref_end, sy, ey, ptile,
                         pn)
Example #22
    fh = netcdf_file(sst_fname)
    lon_var = fh.variables["longitude"]
    lat_var = fh.variables["latitude"]
    time_var = fh.variables["time"]
    return lon_var, lat_var, time_var

########################################################################################

if __name__ == "__main__":
    # these are anomalies
    sst_data, sic_data, sic_ref, sst_ref, sic_hadisst, mv = load_hadisst_data()
    
    # this is the full hadisst data
    hadisst_sst_fname = get_HadISST_input_filename(400)
    sst_hadisst = load_data(hadisst_sst_fname, "sst")
    
    lon_var, lat_var, time_var = get_HadISST_lon_lat_time_vars()
    
    # subset to 1850->2010
    had_sic_data = sic_hadisst[:]
    had_sst_data = sst_hadisst[:]
    S = 0
    E = sst_hadisst.shape[0]
    
    years = [[y-5, y+5] for y in range(1850,2010,10)]
            
#    mapping = calc_sic_mapping(sst_data, sic_data, mv)
#    save_mapping("test_map.nc", mapping, lat_var, lon_var, years, mv)

    # calculate the anomaly
def create_Ma_syn_SSTs(run_type, ref_start, ref_end, sy, ey, eof_year, neofs, ptile, monthly):

    # determine which hadisst ensemble member to use
    hadisst_ens_members = [1059, 115, 1169, 1194, 1346, 137, 1466, 396, 400, 69]
    run_n = hadisst_ens_members[numpy.random.randint(0, len(hadisst_ens_members))]

    # load the CMIP5 ensemble mean timeseries
    # load the ensemble mean of the anomalies
    cmip5_ens_mean_anoms_fname = get_concat_anom_sst_ens_mean_smooth_fname(run_type, ref_start, ref_end, monthly)
    cmip5_ens_mean_anoms = load_sst_data(cmip5_ens_mean_anoms_fname, "sst")

    # load the eof patterns in the eof_year
    eof_fname = get_cmip5_EOF_filename(run_type, ref_start, ref_end, eof_year, monthly=True)
    eofs = load_sst_data(eof_fname, "sst")
    
    # load the principal components for the eof_year
    syn_pc_fname  = get_Ma_syn_SST_PCs_filename(run_type, ref_start, ref_end, eof_year, ptile, monthly=True)
    syn_pc = load_data(syn_pc_fname, "sst")
    
    # load the timeseries of scalings and offsets to the pcs over the CMIP5 period
    proj_pc_scale_fname = get_cmip5_proj_PC_scale_filename(run_type, ref_start, ref_end, eof_year, monthly=True)
    proj_pc_scale  = load_data(proj_pc_scale_fname, "sst_scale")
    proj_pc_offset = load_data(proj_pc_scale_fname, "sst_offset")
    
    # corresponding weights that we supplied to the EOF function
    coslat = numpy.cos(numpy.deg2rad(numpy.arange(89.5, -90.5,-1.0))).clip(0., 1.)
    wgts = numpy.sqrt(coslat)[..., numpy.newaxis]

    # create the timeseries of reconstructed SSTs for just this sample
    # recreate the field - month by month
    # pattern number
    pn = 0

    nmons=12
    # subset the mean anomalies and the proj_pc_scale and offset
    cmip5_sy = 1899
    si = (sy-cmip5_sy)*12
    ei = (ey-cmip5_sy)*12
    cmip5_ens_mean_anoms = cmip5_ens_mean_anoms[si:ei]

    if ey == 2101:
        # create 2101
        S = cmip5_ens_mean_anoms.shape
        cmip5_ens_mean_anoms2 = numpy.zeros([S[0]+12, S[1], S[2]], 'f')
        cmip5_ens_mean_anoms2[:S[0]] = cmip5_ens_mean_anoms
        cmip5_ens_mean_anoms2[-12:] = cmip5_ens_mean_anoms[-12:]
        cmip5_ens_mean_anoms = cmip5_ens_mean_anoms2

    proj_pc_scale = proj_pc_scale[si-12:ei]
    proj_pc_offset = proj_pc_offset[si-12:ei]
    syn_sst_rcp = numpy.ma.zeros([proj_pc_scale.shape[0], eofs.shape[2], eofs.shape[3]], 'f')
    #
    for pn in range(0, 2):  # two patterns per percentile
        for m in range(0, nmons):
            pc_ts = syn_pc[m,pn,:neofs] * proj_pc_scale[m::12,:neofs] + proj_pc_offset[m::12,:neofs]
            syn_sst_rcp[m::12] = reconstruct_field(pc_ts, eofs[m], neofs, wgts)

        # load the hadisst reference
        n_repeats = cmip5_ens_mean_anoms.shape[0] / 12       # number of repeats = number of years
        hadisst_ac = create_hadisst_monthly_reference(run_type, ref_start, ref_end, n_repeats, run_n)
        # load the internal variability - we are only interested in the 30 year observed ones
        resid_fname = get_HadISST_monthly_residuals_fname(1899, 2010, 400)
        intvar = load_data(resid_fname, "sst")
        intvar = intvar[(1973-1899)*12:(2007-1899)*12]
        print "cmip5_ens_mean_anoms ", cmip5_ens_mean_anoms.shape
        print "syn_sst_rcp ", syn_sst_rcp.shape
        print "hadisst_ac ", hadisst_ac.shape
        print "intvar ", intvar.shape
        out_data = cmip5_ens_mean_anoms + syn_sst_rcp + hadisst_ac + intvar
        # save the synthetic ssts
        save_Ma_syn_SSTs(out_data, run_type, ref_start, ref_end, sy, ey, ptile, pn)
Example #24
def create_cmip5_rcp_anomalies(run_type,
                               ref_start,
                               ref_end,
                               eof_year,
                               percentile,
                               monthly=True):
    # create the time series of anomalies from the mean of the various
    # samples in the CMIP5 ensemble
    # This spans the uncertainty of the GMT response to GHG forcing in CMIP5

    if run_type == "likely":
        load_run_type = "rcp45"
    else:
        load_run_type = run_type

    # load the eof patterns in the eof_year
    eof_fname = get_cmip5_EOF_filename(load_run_type,
                                       ref_start,
                                       ref_end,
                                       eof_year,
                                       monthly=monthly)
    eofs = load_sst_data(eof_fname, "sst")

    # load the principal components for the eof_year
    syn_pc_fname = get_syn_SST_PCs_filename(load_run_type,
                                            ref_start,
                                            ref_end,
                                            eof_year,
                                            monthly=monthly)
    syn_pc_fname_new = syn_pc_fname[:-3] + "_new.nc"
    syn_pc = load_data(syn_pc_fname_new, "sst")

    # load the timeseries of scalings and offsets to the pcs over the CMIP5 period
    proj_pc_scale_fname = get_cmip5_proj_PC_scale_filename(load_run_type,
                                                           ref_start,
                                                           ref_end,
                                                           eof_year,
                                                           monthly=monthly)
    proj_pc_scale = load_data(proj_pc_scale_fname, "sst_scale")
    proj_pc_offset = load_data(proj_pc_scale_fname, "sst_offset")

    # corresponding weights that we supplied to the EOF function
    coslat = numpy.cos(numpy.deg2rad(numpy.arange(89.5, -90.5,
                                                  -1.0))).clip(0., 1.)
    wgts = numpy.sqrt(coslat)[..., numpy.newaxis]

    # create the timeseries of reconstructed SSTs for just this sample
    # recreate the field - month by month if necessary
    if monthly:
        syn_sst_rcp = numpy.ma.zeros(
            [proj_pc_scale.shape[0], eofs.shape[2], eofs.shape[3]], 'f')
        for m in range(0, 12):
            pc_ts = syn_pc[m, percentile, :neofs] * proj_pc_scale[
                m::12, :neofs] + proj_pc_offset[m::12, :neofs]
            syn_sst_rcp[m::12] = reconstruct_field(pc_ts, eofs[m], neofs, wgts)
    else:
        pc_ts = syn_pc[
            0, percentile, :
            neofs] * proj_pc_scale[:, :neofs] + proj_pc_offset[:, :neofs]
        syn_sst_rcp = reconstruct_field(pc_ts, eofs[0], neofs, wgts)
    return syn_sst_rcp