Example #1
# Assumed imports for all examples below; read_bkgdrate, AssignFP, writeNetCDF,
# and the other module helpers (IncludeModels, SmoothZOSTOGA, DriftCorr, Smooth,
# IncludeDABZOSModels) are project-local FACTS functions.
import os
import sys
import pickle
import time
import numpy as np
from netCDF4 import Dataset

def kopp14SROCC_postprocess_icesheets(samptype, focus_site_ids, pipeline_id):
	
	# Read in the fitted parameters from parfile
	projfile = "{}_projections.pkl".format(pipeline_id)
	try:
		f = open(projfile, 'rb')
	except IOError:
		print("Cannot open projection file {}\n".format(projfile))
		sys.exit(1)
	
	# Extract the data from the file
	my_data = pickle.load(f)
	projdata = my_data[samptype]
	targyears = my_data['targyears']
	f.close() 
	
	# Read in the scenario from the corr file
	corrfile = "{}_corr.pkl".format(pipeline_id)
	try:
		f = open(corrfile, 'rb')
	except IOError:
		print("Cannot open correlation file {}\n".format(corrfile))
		sys.exit(1)
	
	# Extract the data from the file
	my_data = pickle.load(f)
	scenario = my_data['scenario']
	f.close() 
	
	# Load the site locations	
	ratefile = os.path.join(os.path.dirname(__file__), "bkgdrate.tsv")
	(_, site_ids, site_lats, site_lons) = read_bkgdrate(ratefile, True)
	
	# Test to make sure the list of sites is valid
	if np.any([x >= 0 for x in focus_site_ids]):
		_, _, site_inds = np.intersect1d(focus_site_ids, site_ids, return_indices=True)
		site_ids = site_ids[site_inds]
		site_lats = site_lats[site_inds]
		site_lons = site_lons[site_inds]
	
	# Get the fingerprints for all sites from all ice sheets
	fpdir = os.path.join(os.path.dirname(__file__), "FPRINT")
	gisfp = AssignFP(os.path.join(fpdir,"fprint_gis.nc"), site_lats, site_lons)
	waisfp = AssignFP(os.path.join(fpdir,"fprint_wais.nc"), site_lats, site_lons)
	eaisfp = AssignFP(os.path.join(fpdir,"fprint_eais.nc"), site_lats, site_lons)
	
	# Multiply the fingerprints and the projections
	gissl = np.multiply.outer(projdata[:,:,0], gisfp)
	waissl = np.multiply.outer(projdata[:,:,1], waisfp)
	eaissl = np.multiply.outer(projdata[:,:,2], eaisfp)
	
	# Write to netcdf
	writeNetCDF(gissl, pipeline_id, "GIS", targyears, site_lats, site_lons, site_ids)
	writeNetCDF(waissl, pipeline_id, "WAIS", targyears, site_lats, site_lons, site_ids)
	writeNetCDF(eaissl, pipeline_id, "EAIS", targyears, site_lats, site_lons, site_ids)
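
A minimal usage sketch, assuming the "{pipeline_id}_projections.pkl" and "{pipeline_id}_corr.pkl" files from the projection stage are in the working directory; the sample-type key, site IDs, and pipeline ID below are hypothetical:

# Hypothetical values; a negative entry in focus_site_ids keeps every site
# listed in bkgdrate.tsv.
kopp14SROCC_postprocess_icesheets(samptype="samps",
                                  focus_site_ids=np.array([12, 155, 299]),
                                  pipeline_id="icesheets-kopp14sroccicesheets")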
Example #2
def ar5_postprocess_thermalexp(focus_site_ids, pipeline_id):

    # Load the projection file
    projfile = "{}_projections.pkl".format(pipeline_id)
    try:
        f = open(projfile, 'rb')
    except IOError:
        print("Cannot open projection file {}\n".format(projfile))
        sys.exit(1)

    # Extract the configuration variables
    my_proj = pickle.load(f)
    f.close()

    targyears = my_proj["data_years"]
    thermsamps = my_proj["zx"]
    startyr = my_proj["startyr"]
    rcp_scenario = my_proj['scenario']

    # Load the site locations
    ratefile = os.path.join(os.path.dirname(__file__), "bkgdrate.tsv")
    (_, site_ids, site_lats, site_lons) = read_bkgdrate(ratefile, True)

    # FOR SIMPLICITY, LOCALIZE TO ONLY A FEW LOCATIONS
    if np.any([x >= 0 for x in focus_site_ids]):
        _, _, site_inds = np.intersect1d(focus_site_ids,
                                         site_ids,
                                         return_indices=True)
        site_ids = site_ids[site_inds]
        site_lats = site_lats[site_inds]
        site_lons = site_lons[site_inds]

    # Initialize variable to hold the localized projections
    (nsamps, ntimes) = thermsamps.shape
    nsites = len(site_ids)

    # Apply the effective fingerprint of "1" to the global projections for each site
    local_sl = np.tile(thermsamps, (nsites, 1, 1))

    # Calculate the quantiles
    out_q = np.unique(
        np.append(np.linspace(0, 1, 101),
                  (0.001, 0.005, 0.01, 0.05, 0.167, 0.5, 0.833, 0.95, 0.99,
                   0.995, 0.999)))
    nq = len(out_q)
    local_sl_q = np.nanquantile(local_sl, out_q, axis=1)

    # Calculate the mean and sd of the samples
    local_sl_mean = np.nanmean(local_sl, axis=1)
    local_sl_sd = np.nanstd(local_sl, axis=1)

    # Write the localized projections to a netcdf file
    rootgrp = Dataset(os.path.join(os.path.dirname(__file__),
                                   "{}_localsl.nc".format(pipeline_id)),
                      "w",
                      format="NETCDF4")

    # Define Dimensions
    site_dim = rootgrp.createDimension("nsites", nsites)
    year_dim = rootgrp.createDimension("years", ntimes)
    q_dim = rootgrp.createDimension("quantiles", nq)

    # Populate dimension variables
    lat_var = rootgrp.createVariable("lat", "f4", ("nsites", ))
    lon_var = rootgrp.createVariable("lon", "f4", ("nsites", ))
    id_var = rootgrp.createVariable("id", "i4", ("nsites", ))
    year_var = rootgrp.createVariable("years", "i4", ("years", ))
    q_var = rootgrp.createVariable("quantiles", "f4", ("quantiles", ))

    # Create a data variable
    localslq = rootgrp.createVariable("localSL_quantiles",
                                      "f4", ("quantiles", "nsites", "years"),
                                      zlib=True,
                                      least_significant_digit=2)
    localslmean = rootgrp.createVariable("localSL_mean",
                                         "f4", ("nsites", "years"),
                                         zlib=True,
                                         least_significant_digit=2)
    localslsd = rootgrp.createVariable("localSL_std",
                                       "f4", ("nsites", "years"),
                                       zlib=True,
                                       least_significant_digit=2)

    # Assign attributes
    rootgrp.description = "Local SLR contributions from thermal expansion according to AR5 workflow"
    rootgrp.history = "Created " + time.ctime(time.time())
    rootgrp.source = "FACTS: {0} - {1}, Start year = {2}".format(
        pipeline_id, rcp_scenario, startyr)
    lat_var.units = "Degrees North"
    lon_var.units = "Degrees East"
    localslq.units = "mm"
    localslmean.units = "mm"
    localslsd.units = "mm"

    # Put the data into the netcdf variables
    lat_var[:] = site_lats
    lon_var[:] = site_lons
    id_var[:] = site_ids
    year_var[:] = targyears
    q_var[:] = out_q
    localslq[:, :, :] = local_sl_q
    localslmean[:, :] = local_sl_mean
    localslsd[:, :] = local_sl_sd

    # Close the netcdf
    rootgrp.close()
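
A minimal usage sketch, assuming the thermal-expansion projection stage has already written "{pipeline_id}_projections.pkl"; the pipeline ID below is hypothetical:

# A negative entry in focus_site_ids keeps every site in bkgdrate.tsv.
ar5_postprocess_thermalexp(focus_site_ids=np.array([-1]),
                           pipeline_id="thermalexpansion-ar5")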
Example #3
def kopp14_preprocess_oceandynamics(rcp_scenario, zostoga_modeldir,
                                    zos_modeldir, driftcorr, focus_site_ids,
                                    pipeline_id):

    # Define variables
    datayears = np.arange(1861, 2300)
    targyears = np.arange(2010, 2101, 10)
    mergeZOSZOSTOGA = True
    smoothwin = 19
    baseyear = 2005
    GCMprobscale = 0.833
    maxDOF = np.inf

    #--------- Begin Thermal Expansion ---------------------------------------------------
    # Read in the ZOSTOGA data
    zostoga_modeldir = os.path.join(zostoga_modeldir, rcp_scenario)
    (zostoga_modellist, ZOSTOGA) = IncludeModels(zostoga_modeldir,
                                                 ("ZOSTOGA", "ZOSGA"),
                                                 datayears)

    # Center, suture, and smooth ZOSTOGA
    sZOSTOGA = np.nan * ZOSTOGA
    for i in np.arange(0, ZOSTOGA.shape[1]):
        (ZOSTOGA[:, i], sZOSTOGA[:, i]) = SmoothZOSTOGA(ZOSTOGA[:, i],
                                                        datayears, baseyear,
                                                        smoothwin)

    # Apply the drift correction if needed
    if (driftcorr):
        gslfile = os.path.join(os.path.dirname(__file__),
                               "CSIRO_Recons_gmsl_yr_2011.csv")
        (sZOSTOGA, CWdrift, histGICrate,
         selectyears) = DriftCorr(sZOSTOGA, datayears, baseyear, rcp_scenario,
                                  gslfile)
    else:
        CWdrift = np.nan
        histGICrate = np.nan
        selectyears = np.nan

    # Store the configuration in a pickle
    output = {'rcp_scenario': rcp_scenario, 'datayears': datayears,
              'targyears': targyears, 'mergeZOSZOSTOGA': mergeZOSZOSTOGA,
              'smoothwin': smoothwin, 'driftcorr': driftcorr,
              'baseyear': baseyear, 'GCMprobscale': GCMprobscale,
              'maxDOF': maxDOF}

    # Write the configuration to a file
    outdir = os.path.dirname(__file__)
    outfile = open(os.path.join(outdir, "{}_config.pkl".format(pipeline_id)),
                   'wb')
    pickle.dump(output, outfile)
    outfile.close()

    # Store the ZOSTOGA variables in a pickle
    output = {'sZOSTOGA': sZOSTOGA, 'zostoga_modellist': zostoga_modellist,
              'CWdrift': CWdrift, 'histGICrate': histGICrate,
              'selectyears': selectyears}

    # Write the ZOSTOGA variables to a file
    outfile = open(os.path.join(outdir, "{}_ZOSTOGA.pkl".format(pipeline_id)),
                   'wb')
    pickle.dump(output, outfile)
    outfile.close()

    #------------ Begin Ocean Dynamics ---------------------------------------------------

    # Load the site locations
    ratefile = os.path.join(os.path.dirname(__file__), "bkgdrate.tsv")
    (_, targregion_ids, targregion_lats,
     targregion_lons) = read_bkgdrate(ratefile, True)

    # Make sure all the requested IDs are available
    if np.any([x >= 0 for x in focus_site_ids]):
        missing_ids = np.setdiff1d(focus_site_ids, targregion_ids)
        if (len(missing_ids) != 0):
            missing_ids_string = ",".join(str(this) for this in missing_ids)
            raise Exception("The following IDs are not available: {}".format(
                missing_ids_string))

        # Map the requested site IDs to target regions
        focus_site_ids_map = np.flatnonzero(
            np.isin(targregion_ids, focus_site_ids))
        focus_site_ids = targregion_ids[focus_site_ids_map]
        focus_site_lats = targregion_lats[focus_site_ids_map]
        focus_site_lons = targregion_lons[focus_site_ids_map]
    else:
        focus_site_ids = targregion_ids
        focus_site_lats = targregion_lats
        focus_site_lons = targregion_lons

    # Load the ZOS data
    (zos_modellist, ZOS_raw) = IncludeDABZOSModels(zos_modeldir, rcp_scenario,
                                                   focus_site_lats,
                                                   focus_site_lons)

    # Find the overlap between ZOS and ZOSTOGA
    comb_modellist, zostoga_model_idx, zos_model_idx = np.intersect1d(
        zostoga_modellist, zos_modellist, return_indices=True)
    '''
    NOTE: POTENTIAL BUG IN ORIGINAL CODE
    The original code uses the smoothed ZOSTOGA data as the raw ZOSTOGA values.
    This in turn smooths the ZOSTOGA data over the 'smoothwin' period twice. To
    replicate the bug, set 'ZOSTOGAadj' to a subset of 'sZOSTOGA' instead of
    'ZOSTOGA'.
    '''
    ZOSTOGAadj = sZOSTOGA[:, zostoga_model_idx]  # Replicate potential bug
    #ZOSTOGAadj = ZOSTOGA[:,zostoga_model_idx]  # Fix for potential bug
    ZOS_raw = ZOS_raw[:, zos_model_idx, :]

    # Should we merge ZOSTOGA and ZOS?
    # NOTE: ZOS starts at 1860 while ZOSTOGA starts at 1861. Pop off the 1860 value for
    # ZOS and merge if necessary
    if (mergeZOSZOSTOGA):
        ZOS = ZOS_raw + ZOSTOGAadj[:, :, np.newaxis]
    else:
        ZOS = ZOS_raw

    # Smooth ZOS and ZOSTOGA over the 19-year smoothing window
    def nanSmooth(x, w):
        # Smooth only the non-NaN entries; copy first so the original column
        # passed in by np.apply_along_axis is not modified in place.
        idx = np.flatnonzero(~np.isnan(x))
        temp = x.copy()
        temp[idx] = Smooth(x[idx], w)
        return temp

    sZOS = np.apply_along_axis(nanSmooth, axis=0, arr=ZOS, w=smoothwin)
    sZOSTOGAadj = np.apply_along_axis(nanSmooth,
                                      axis=0,
                                      arr=ZOSTOGAadj,
                                      w=smoothwin)

    # Center the smoothed ZOS/ZOSTOGAadj to the baseyear
    baseyear_idx = np.flatnonzero(datayears == baseyear)
    sZOS = np.apply_along_axis(lambda z, idx: z - z[idx],
                               axis=0,
                               arr=sZOS,
                               idx=baseyear_idx)
    sZOSTOGAadj = np.apply_along_axis(lambda z, idx: z - z[idx],
                                      axis=0,
                                      arr=sZOSTOGAadj,
                                      idx=baseyear_idx)

    # Store the ZOS variable in a pickle
    output = {'sZOS': sZOS, 'zos_modellist': zos_modellist,
              'datayears': datayears, 'focus_site_ids': focus_site_ids,
              'focus_site_lats': focus_site_lats,
              'focus_site_lons': focus_site_lons,
              'sZOSTOGAadj': sZOSTOGAadj, 'comb_modellist': comb_modellist}

    # Write the ZOS variables to a file
    outfile = open(os.path.join(outdir, "{}_ZOS.pkl".format(pipeline_id)),
                   'wb')
    pickle.dump(output, outfile)
    outfile.close()
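
A minimal sketch of how this preprocessing step might be driven; the data directories below are hypothetical and must contain the per-scenario model subfolders expected by the module helpers:

# Hypothetical paths and pipeline ID; driftcorr=True enables the CSIRO
# GMSL-based drift-correction branch above.
kopp14_preprocess_oceandynamics(rcp_scenario="rcp85",
                                zostoga_modeldir="./zostoga_models",
                                zos_modeldir="./zos_models",
                                driftcorr=True,
                                focus_site_ids=np.array([-1]),
                                pipeline_id="oceandynamics-kopp14")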
Example #4
def ar5_postprocess_glacierscmip6(focus_site_ids, pipeline_id):

    # Read in the global projections
    projfile = "{}_projections.pkl".format(pipeline_id)
    try:
        f = open(projfile, 'rb')
    except IOError:
        print("Cannot open projection file {}\n".format(projfile))
        sys.exit(1)

    # Extract the projection data from the file
    my_data = pickle.load(f)
    gicsamps = my_data["gicsamps"]
    glac_region_names = my_data["glac_region_names"]
    data_years = my_data["data_years"]
    f.close()

    # Read in the configuration information
    configfile = "{}_data.pkl".format(pipeline_id)
    try:
        f = open(configfile, 'rb')
    except IOError:
        print("Cannot open configuration file {}\n".format(configfile))
        sys.exit(1)

    # Extract the configuration data
    my_data = pickle.load(f)
    scenario = my_data["scenario"]
    include_models = my_data['include_models']
    include_scenarios = my_data['include_scenarios']
    nmodels = len(include_models)
    f.close()

    # Produce the included model string
    model_string_pieces = [
        "{0}-{1}".format(include_models[x], include_scenarios[x])
        for x in np.arange(nmodels)
    ]
    model_string = "Models and scenarios included: " + ", ".join(
        model_string_pieces)

    # Load the site locations
    ratefilename = "bkgdrate.tsv"
    ratefile = os.path.join(os.path.dirname(__file__), ratefilename)
    (_, site_ids, site_lats, site_lons) = read_bkgdrate(ratefile, True)

    # FOR SIMPLICITY, LOCALIZE TO ONLY A FEW LOCATIONS
    if np.any([x >= 0 for x in focus_site_ids]):
        _, _, site_inds = np.intersect1d(focus_site_ids,
                                         site_ids,
                                         return_indices=True)
        site_ids = site_ids[site_inds]
        site_lats = site_lats[site_inds]
        site_lons = site_lons[site_inds]

    # Initialize variable to hold the localized projections
    (nsamps, nregions, ntimes) = gicsamps.shape
    nsites = len(site_ids)
    local_sl = np.full((nsites, nsamps, ntimes), 0.0)

    # Loop through the GIC regions
    for i in np.arange(0, nregions):

        # Get the fingerprint file name for this region
        thisRegion = glac_region_names[i]

        # Get the fingerprints for these sites from this region
        regionfile = os.path.join(os.path.dirname(__file__), "FPRINT",
                                  "fprint_{0}.nc".format(thisRegion))
        regionfp = AssignFP(regionfile, site_lats, site_lons)

        # Multiply the fingerprints and the projections and add them to the running total
        # over the regions
        local_sl += np.transpose(
            np.multiply.outer(gicsamps[:, i, :], regionfp), (2, 0, 1))

    # Calculate the quantiles
    out_q = np.unique(
        np.append(np.linspace(0, 1, 101),
                  (0.001, 0.005, 0.01, 0.05, 0.167, 0.5, 0.833, 0.95, 0.99,
                   0.995, 0.999)))
    nq = len(out_q)
    local_sl_q = np.nanquantile(local_sl, out_q, axis=1)

    # Calculate the mean and sd of the samples
    local_sl_mean = np.nanmean(local_sl, axis=1)
    local_sl_sd = np.nanstd(local_sl, axis=1)

    # Write the localized projections to a netcdf file
    rootgrp = Dataset(os.path.join(os.path.dirname(__file__),
                                   "{}_localsl.nc".format(pipeline_id)),
                      "w",
                      format="NETCDF4")

    # Define Dimensions
    site_dim = rootgrp.createDimension("nsites", nsites)
    year_dim = rootgrp.createDimension("years", ntimes)
    q_dim = rootgrp.createDimension("quantiles", nq)

    # Populate dimension variables
    lat_var = rootgrp.createVariable("lat", "f4", ("nsites", ))
    lon_var = rootgrp.createVariable("lon", "f4", ("nsites", ))
    id_var = rootgrp.createVariable("id", "i4", ("nsites", ))
    year_var = rootgrp.createVariable("years", "i4", ("years", ))
    q_var = rootgrp.createVariable("quantiles", "f4", ("quantiles", ))

    # Create a data variable
    localslq = rootgrp.createVariable("localSL_quantiles",
                                      "f4", ("quantiles", "nsites", "years"),
                                      zlib=True,
                                      least_significant_digit=2)
    localslmean = rootgrp.createVariable("localSL_mean",
                                         "f4", ("nsites", "years"),
                                         zlib=True,
                                         least_significant_digit=2)
    localslsd = rootgrp.createVariable("localSL_std",
                                       "f4", ("nsites", "years"),
                                       zlib=True,
                                       least_significant_digit=2)

    # Assign attributes
    rootgrp.description = "Local SLR contributions from glaciers and ice caps according to AR5 glacier_cmip6 workflow"
    rootgrp.history = "Created " + time.ctime(time.time())
    rootgrp.source = "FACTS: AR5 Glacier-CMIP6 workflow - {0}. ".format(
        scenario) + model_string
    lat_var.units = "Degrees North"
    lon_var.units = "Degrees East"
    localslq.units = "mm"
    localslmean.units = "mm"
    localslsd.units = "mm"

    # Put the data into the netcdf variables
    lat_var[:] = site_lats
    lon_var[:] = site_lons
    id_var[:] = site_ids
    year_var[:] = data_years
    q_var[:] = out_q
    localslq[:, :, :] = local_sl_q
    localslmean[:, :] = local_sl_mean
    localslsd[:, :] = local_sl_sd

    # Close the netcdf
    rootgrp.close()
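
A minimal usage sketch, assuming the glacier projection stage has written both "{pipeline_id}_projections.pkl" and "{pipeline_id}_data.pkl"; the pipeline ID and site IDs are hypothetical:

ar5_postprocess_glacierscmip6(focus_site_ids=np.array([12, 155]),
                              pipeline_id="glaciers-ar5-glacierscmip6")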
Example #5
def ssp_postprocess_landwaterstorage(focus_site_ids, pipeline_id):

    # Load the projection file
    projfile = "{}_projections.pkl".format(pipeline_id)
    try:
        f = open(projfile, 'rb')
    except IOError:
        print("Cannot open projection file {}\n".format(projfile))
        sys.exit(1)

    # Extract the configuration variables
    my_proj = pickle.load(f)
    f.close()

    targyears = my_proj["years"]
    scen = my_proj['scen']
    lwssamps = np.transpose(my_proj["lwssamps"])

    # Load the site locations
    ratefile = os.path.join(os.path.dirname(__file__), "bkgdrate.tsv")
    (_, site_ids, site_lats, site_lons) = read_bkgdrate(ratefile, True)

    # Match the user selected sites to those in the PSMSL data
    if np.any([x >= 0 for x in focus_site_ids]):
        _, _, site_inds = np.intersect1d(focus_site_ids,
                                         site_ids,
                                         return_indices=True)
        site_ids = site_ids[site_inds]
        site_lats = site_lats[site_inds]
        site_lons = site_lons[site_inds]

    # Initialize variable to hold the localized projections
    (nsamps, ntimes) = lwssamps.shape
    nsites = len(site_ids)

    # Apply the fingerprints
    fpfile = os.path.join(os.path.dirname(__file__),
                          "REL_GROUNDWATER_NOMASK.nc")
    fpsites = AssignFP(fpfile, site_lats, site_lons)
    local_sl = lwssamps[np.newaxis, :, :] * fpsites[:, np.newaxis, np.newaxis]

    # Calculate the quantiles
    out_q = np.unique(
        np.append(np.linspace(0, 1, 101),
                  (0.001, 0.005, 0.01, 0.05, 0.167, 0.5, 0.833, 0.95, 0.99,
                   0.995, 0.999)))
    nq = len(out_q)
    local_sl_q = np.nanquantile(local_sl, out_q, axis=1)

    # Calculate the mean and sd of the samples
    local_sl_mean = np.nanmean(local_sl, axis=1)
    local_sl_sd = np.nanstd(local_sl, axis=1)

    # Write the localized projections to a netcdf file
    rootgrp = Dataset(os.path.join(os.path.dirname(__file__),
                                   "{}_localsl.nc".format(pipeline_id)),
                      "w",
                      format="NETCDF4")

    # Define Dimensions
    site_dim = rootgrp.createDimension("nsites", nsites)
    year_dim = rootgrp.createDimension("years", ntimes)
    q_dim = rootgrp.createDimension("quantiles", nq)

    # Populate dimension variables
    lat_var = rootgrp.createVariable("lat", "f4", ("nsites", ))
    lon_var = rootgrp.createVariable("lon", "f4", ("nsites", ))
    id_var = rootgrp.createVariable("id", "i4", ("nsites", ))
    year_var = rootgrp.createVariable("years", "i4", ("years", ))
    q_var = rootgrp.createVariable("quantiles", "f4", ("quantiles", ))

    # Create a data variable
    localslq = rootgrp.createVariable("localSL_quantiles",
                                      "f4", ("quantiles", "nsites", "years"),
                                      zlib=True,
                                      least_significant_digit=2)
    localslmean = rootgrp.createVariable("localSL_mean",
                                         "f4", ("nsites", "years"),
                                         zlib=True,
                                         least_significant_digit=2)
    localslsd = rootgrp.createVariable("localSL_std",
                                       "f4", ("nsites", "years"),
                                       zlib=True,
                                       least_significant_digit=2)

    # Assign attributes
    rootgrp.description = "Local SLR contributions from land water storage from the SSP module set"
    rootgrp.history = "Created " + time.ctime(time.time())
    rootgrp.source = "FACTS: {0} - {1}".format(pipeline_id, scen)
    lat_var.units = "Degrees North"
    lon_var.units = "Degrees East"
    localslq.units = "mm"
    localslmean.units = "mm"
    localslsd.units = "mm"

    # Put the data into the netcdf variables
    lat_var[:] = site_lats
    lon_var[:] = site_lons
    id_var[:] = site_ids
    year_var[:] = targyears
    q_var[:] = out_q
    localslq[:, :, :] = local_sl_q
    localslmean[:, :] = local_sl_mean
    localslsd[:, :] = local_sl_sd

    # Close the netcdf
    rootgrp.close()
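
A minimal usage sketch; the scenario string is read from the pickled projections, so only the site filter and a (hypothetical) pipeline ID are needed:

# A negative entry in focus_site_ids keeps every site in bkgdrate.tsv.
ssp_postprocess_landwaterstorage(focus_site_ids=np.array([-1]),
                                 pipeline_id="landwaterstorage-ssp")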