# Standard-library and third-party imports needed by the functions below. The FACTS
# helper functions used throughout (ReadLocationFile, AssignFP, NearestPoints, Smooth,
# SmoothZOSTOGA, FindInputModels, IncludeCMIP6Models, IncludeCMIP6ZOSModels,
# writeNetCDF) are assumed to be provided by the surrounding FACTS module set.
import os
import sys
import time
import pickle

import numpy as np
import dask.array as da
import xarray as xr
from netCDF4 import Dataset
from scipy.stats import norm


def ssp_postprocess_landwaterstorage(locationfilename, chunksize, pipeline_id):

	# Load the configuration file
	projfile = "{}_projections.pkl".format(pipeline_id)
	try:
		f = open(projfile, 'rb')
	except:
		print("Cannot open projection file {}\n".format(projfile))
		sys.exit(1)

	# Extract the configuration variables
	my_proj = pickle.load(f)
	f.close()

	targyears = my_proj["years"]
	scenario = my_proj['scen']
	baseyear = my_proj['baseyear']
	lwssamps = np.transpose(my_proj["lwssamps"])

	# Load the site locations
	locationfile = os.path.join(os.path.dirname(__file__), locationfilename)
	(_, site_ids, site_lats, site_lons) = ReadLocationFile(locationfile)

	# Initialize variable to hold the localized projections
	nsamps = lwssamps.shape[0]
	nyears = len(targyears)
	nsites = len(site_ids)

	# Apply the fingerprints
	fpfile = os.path.join(os.path.dirname(__file__), "REL_GROUNDWATER_NOMASK.nc")
	fpsites = da.array(AssignFP(fpfile, site_lats, site_lons))
	fpsites = fpsites.rechunk(chunksize)

	# Calculate the local sl samples
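	# lwssamps has shape (samples, years); the outer product with the (locations,)
	# fingerprint array yields (samples, years, locations), matching the Dataset below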
	local_sl = np.multiply.outer(lwssamps, fpsites)

	# Define the missing value for the netCDF files
	nc_missing_value = np.iinfo(np.int16).min

	# Create the xarray data structures for the localized projections
	ncvar_attributes = {"description": "Local SLR contributions from land water storage according to Kopp 2014 workflow",
			"history": "Created " + time.ctime(time.time()),
			"source": "SLR Framework: Kopp 2014 workflow",
			"scenario": scenario,
			"baseyear": baseyear}

	lws_out = xr.Dataset({"sea_level_change": (("samples", "years", "locations"), local_sl, {"units":"mm", "missing_value":nc_missing_value}),
							"lat": (("locations"), site_lats),
							"lon": (("locations"), site_lons)},
		coords={"years": targyears, "locations": site_ids, "samples": np.arange(nsamps)}, attrs=ncvar_attributes)

	lws_out.to_netcdf("{0}_localsl.nc".format(pipeline_id), encoding={"sea_level_change": {"dtype": "i2", "zlib": True, "complevel":4, "_FillValue": nc_missing_value}})


	return(None)
def ipccar6_postprocess_icesheet(locationfilename, pipeline_id):

    # Read in the fitted parameters from parfile
    projfile = "{}_projections.pkl".format(pipeline_id)
    try:
        f = open(projfile, 'rb')
    except:
        print("Cannot open projfile\n")
        sys.exit(1)

    # Extract the data from the file
    my_data = pickle.load(f)
    f.close()

    eais_samps = my_data["eais_samps"]
    wais_samps = my_data["wais_samps"]
    pen_samps = my_data["pen_samps"]
    gis_samps = my_data["gis_samps"]
    targyears = my_data["years"]
    scenario = my_data["scenario"]
    model_driver = my_data["model_driver"]

    # Load the site locations
    locationfile = os.path.join(os.path.dirname(__file__), locationfilename)
    (_, site_ids, site_lats, site_lons) = ReadLocationFile(locationfile)

    # Get the fingerprints for all sites from all ice sheets
    fpdir = os.path.join(os.path.dirname(__file__), "FPRINT")
    gisfp = AssignFP(os.path.join(fpdir, "fprint_gis.nc"), site_lats,
                     site_lons)
    waisfp = AssignFP(os.path.join(fpdir, "fprint_wais.nc"), site_lats,
                      site_lons)
    eaisfp = AssignFP(os.path.join(fpdir, "fprint_eais.nc"), site_lats,
                      site_lons)

    # Multiply the fingerprints and the projections
    gissl = np.multiply.outer(gis_samps, gisfp)
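    # The Antarctic Peninsula shares the WAIS fingerprint, so its samples are added
    # to the WAIS samples before the fingerprint is applied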
    waissl = np.multiply.outer(wais_samps + pen_samps, waisfp)
    eaissl = np.multiply.outer(eais_samps, eaisfp)

    # Write to netcdf
    writeNetCDF(gissl, pipeline_id, "GIS", targyears, site_lats, site_lons,
                site_ids, scenario, model_driver)
    writeNetCDF(waissl, pipeline_id, "WAIS", targyears, site_lats, site_lons,
                site_ids, scenario, model_driver)
    writeNetCDF(eaissl, pipeline_id, "EAIS", targyears, site_lats, site_lons,
                site_ids, scenario, model_driver)
    writeNetCDF(eaissl + waissl, pipeline_id, "AIS", targyears, site_lats,
                site_lons, site_ids, scenario, model_driver)
def ipccar6_postprocess_larmipicesheet(locationfilename, pipeline_id):

    # Read in the fitted parameters from parfile
    projfile = "{}_projections.pkl".format(pipeline_id)
    try:
        f = open(projfile, 'rb')
    except:
        print("Cannot open projfile\n")
        sys.exit(1)

    # Load the site locations
    locationfile = os.path.join(os.path.dirname(__file__), locationfilename)
    (_, site_ids, site_lats, site_lons) = ReadLocationFile(locationfile)

    # Extract the data from the file
    my_data = pickle.load(f)
    waissamps = my_data['wais_samps'].T
    eaissamps = my_data['eais_samps'].T
    pensamps = my_data['pen_samps'].T
    targyears = my_data['years']
    scenario = my_data['scenario']
    f.close()

    # Fold the peninsula contribution into WAIS, since both use the WAIS fingerprint
    waissamps = waissamps + pensamps

    # Get the fingerprints for all sites from all ice sheets
    fpdir = os.path.join(os.path.dirname(__file__), "FPRINT")
    waisfp = AssignFP(os.path.join(fpdir, "fprint_wais.nc"), site_lats,
                      site_lons)
    eaisfp = AssignFP(os.path.join(fpdir, "fprint_eais.nc"), site_lats,
                      site_lons)

    # Multiply the fingerprints and the projections
    waissl = np.multiply.outer(waissamps, waisfp)
    eaissl = np.multiply.outer(eaissamps, eaisfp)

    # Write to netcdf
    writeNetCDF(waissl, pipeline_id, "WAIS", targyears, site_lats, site_lons,
                site_ids, scenario)
    writeNetCDF(eaissl, pipeline_id, "EAIS", targyears, site_lats, site_lons,
                site_ids, scenario)
    writeNetCDF(waissl + eaissl, pipeline_id, "AIS", targyears, site_lats,
                site_lons, site_ids, scenario)

    return (0)
def tlm_preprocess_oceandynamics(scenario, modeldir, driftcorr, no_correlation,
                                 pyear_start, pyear_end, pyear_step,
                                 locationfilename, baseyear, pipeline_id):

    # Define variables
    datayears = np.arange(1861, 2301)
    targyears = np.arange(pyear_start, pyear_end + 1, pyear_step)
    smoothwin = 19
    GCMprobscale = 0.833
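    # 0.833 is the upper bound of the IPCC "likely" range (17th-83rd percentile);
    # assumed here to control how the GCM ensemble spread is mapped to probabilities
    # downstream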
    maxDOF = np.iinfo(np.int32).max

    # Define the list of input models and scenarios
    tasdir = os.path.join(modeldir, "tas")
    zos_modeldir = os.path.join(modeldir, "zos")
    zostoga_modeldir = os.path.join(modeldir, "zostoga")
    (include_models,
     include_scenarios) = FindInputModels(tasdir, zos_modeldir, scenario)

    if (not include_models):
        raise Exception(
            "No models found for this scenario or temperature target - {}".
            format(scenario))

    # Turn off correlation if this is a temperature target run
    #if(re.search("^tlim", scenario)):
    #	no_correlation = True

    # Turn off merging ZOS and ZOSTOGA if no_correlation
    #if no_correlation:
    #	mergeZOSZOSTOGA = 0
    #else:
    #	mergeZOSZOSTOGA = 1

    # Merging is done in the postprocessing stage automatically.
    mergeZOSZOSTOGA = 0

    # Read in the ZOSTOGA data
    (zostoga_modellist, zostoga_scenariolist,
     ZOSTOGA) = IncludeCMIP6Models(zostoga_modeldir, 'zostoga', datayears,
                                   include_models, include_scenarios)

    # Center, suture, and smooth ZOSTOGA
    sZOSTOGA = np.nan * ZOSTOGA
    for i in np.arange(0, ZOSTOGA.shape[1]):
        (ZOSTOGA[:, i], sZOSTOGA[:, i]) = SmoothZOSTOGA(ZOSTOGA[:, i], datayears,
                                                        baseyear, smoothwin)

    # Store the configuration in a pickle
    output = {'datayears': datayears, 'scenario': scenario, \
     'targyears': targyears, 'mergeZOSZOSTOGA': mergeZOSZOSTOGA,\
     'smoothwin': smoothwin, 'driftcorr': driftcorr, 'baseyear': baseyear,\
     'GCMprobscale': GCMprobscale, 'maxDOF': maxDOF, 'no_correlation': no_correlation}

    # Write the configuration to a file
    outdir = os.path.dirname(__file__)
    outfile = open(os.path.join(outdir, "{}_config.pkl".format(pipeline_id)),
                   'wb')
    pickle.dump(output, outfile)
    outfile.close()

    # Store the ZOSTOGA variables in a pickle
    output = {'sZOSTOGA': sZOSTOGA, 'zostoga_modellist': zostoga_modellist, \
       'zostoga_scenariolist': zostoga_scenariolist}

    # Write the ZOSTOGA variables to a file
    outfile = open(os.path.join(outdir, "{}_ZOSTOGA.pkl".format(pipeline_id)),
                   'wb')
    pickle.dump(output, outfile)
    outfile.close()

    #------------ Begin Ocean Dynamics ---------------------------------------------------

    # Load the site locations
    locationfile = os.path.join(os.path.dirname(__file__), locationfilename)
    (_, focus_site_ids, focus_site_lats,
     focus_site_lons) = ReadLocationFile(locationfile)

    # Load the ZOS data
    (zos_modellist, zos_scenariolist,
     ZOS_raw) = IncludeCMIP6ZOSModels(zos_modeldir, "zos", datayears,
                                      include_models, include_scenarios,
                                      focus_site_lats, focus_site_lons)

    # Find the overlap between ZOS and ZOSTOGA
    comb_modellist, zostoga_model_idx, zos_model_idx = np.intersect1d(
        zostoga_modellist, zos_modellist, return_indices=True)
    # NOTE: POTENTIAL BUG IN ORIGINAL CODE
    # The original code uses the smoothed ZOSTOGA data as the raw ZOSTOGA values. This
    # in turn smooths the ZOSTOGA data over the 'smoothwin' period twice. To replicate
    # the bug, set 'ZOSTOGAadj' to a subset of 'sZOSTOGA' instead of 'ZOSTOGA'.
    # If no_correlation, do not subset the models to overlap
    if no_correlation:
        ZOSTOGAadj = sZOSTOGA  # Replicate potential bug
        #ZOSTOGAadj = ZOSTOGA  # Fix for potential bug
    else:
        ZOSTOGAadj = sZOSTOGA[:, zostoga_model_idx]  # Replicate potential bug
        #ZOSTOGAadj = ZOSTOGA[:,zostoga_model_idx]  # Fix for potential bug
        ZOS_raw = ZOS_raw[:, zos_model_idx, :]

    # Should we merge ZOSTOGA and ZOS?
    if (mergeZOSZOSTOGA):
        ZOS = ZOS_raw + ZOSTOGAadj[:, :, np.newaxis]
    else:
        ZOS = ZOS_raw

    # Smooth ZOS and ZOSTOGA over 19 year smoothing window
    def nanSmooth(x, w):
        idx = np.flatnonzero(~np.isnan(x))
        temp = x.copy()  # copy so the smoothing does not modify the input column in place
        if len(idx) > 0:
            temp[idx] = Smooth(x[idx], w)
        return (temp)

    sZOS = np.apply_along_axis(nanSmooth, axis=0, arr=ZOS, w=smoothwin)
    sZOSTOGAadj = np.apply_along_axis(nanSmooth,
                                      axis=0,
                                      arr=ZOSTOGAadj,
                                      w=smoothwin)

    # Center the smoothed ZOS/ZOSTOGAadj to the baseyear
    baseyear_idx = np.flatnonzero(datayears == baseyear)
    sZOS = np.apply_along_axis(lambda z, idx: z - z[idx],
                               axis=0,
                               arr=sZOS,
                               idx=baseyear_idx)
    sZOSTOGAadj = np.apply_along_axis(lambda z, idx: z - z[idx],
                                      axis=0,
                                      arr=sZOSTOGAadj,
                                      idx=baseyear_idx)

    # Store the ZOS variable in a pickle
    output = {'sZOS': sZOS, 'zos_modellist': zos_modellist, 'zos_scenariolist': zos_scenariolist, 'datayears': datayears, \
       'focus_site_ids': focus_site_ids, 'focus_site_lats': focus_site_lats, \
       'focus_site_lons': focus_site_lons, 'sZOSTOGAadj': sZOSTOGAadj, 'comb_modellist': comb_modellist}

    # Write the ZOS variables to a file
    outfile = open(os.path.join(outdir, "{}_ZOS.pkl".format(pipeline_id)),
                   'wb')
    pickle.dump(output, outfile)
    outfile.close()
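# A minimal invocation sketch (hypothetical paths and pipeline_id; assumes modeldir
# contains the "tas", "zos", and "zostoga" CMIP6 subdirectories used above):
#
# tlm_preprocess_oceandynamics(scenario="ssp585", modeldir="./cmip6", driftcorr=True,
#                              no_correlation=False, pyear_start=2020, pyear_end=2100,
#                              pyear_step=10, locationfilename="location.lst",
#                              baseyear=2005, pipeline_id="od_tlm")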
def ipccar6_postprocess_gmipemuglaciers(locationfilename, chunksize, pipeline_id):

	# Read in the global projections
	projfile = "{}_projections.pkl".format(pipeline_id)
	try:
		f = open(projfile, 'rb')
	except:
		print("Cannot open projfile\n")
		sys.exit(1)

	# Extract the projection data from the file
	my_data = pickle.load(f)
	gicsamps = my_data["gic_samps"]
	targyears = my_data["years"]
	scenario = my_data["scenario"]
	baseyear = my_data["baseyear"]
	f.close()

	# Load the fingerprint metadata
	fpfile = os.path.join(os.path.dirname(__file__), "fingerprint_region_map.csv")
	fpmap_data = np.genfromtxt(fpfile, dtype=None, names=True, delimiter=',', encoding=None)

	# Extract the data
	fpmapperids = fpmap_data['IceID']
	fpmaps = fpmap_data['FPID']
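	# The CSV maps each glacier region ID (IceID) to a fingerprint file identifier
	# (FPID), which is used to build the "fprint_<FPID>.nc" filename below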

	# Load the site locations
	locationfile = os.path.join(os.path.dirname(__file__), locationfilename)
	(_, site_ids, site_lats, site_lons) = ReadLocationFile(locationfile)

	# Initialize variable to hold the localized projections
	#(nregions, nsamps, nyears) = gicsamps.shape
	gicsamps = np.transpose(gicsamps, (1,0,2))
	(nsamps, nregions, nyears) = gicsamps.shape
	nsites = len(site_ids)
	#local_sl = np.full((nsites, nsamps, nyears), 0.0)
	local_sl = da.zeros((nsamps, nyears, nsites), chunks=(-1,-1,chunksize))
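	# local_sl is a lazy dask array chunked along the locations dimension, so the
	# per-region fingerprint products below are accumulated without materializing the
	# full (samples, years, locations) array in memory at once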

	# Loop through the GIC regions
	for i in np.arange(nregions):

		# Get the fingerprint file name for this region
		fp_idx = np.flatnonzero(fpmapperids == i+1)
		thisRegion = fpmaps[fp_idx][0]

		# Get the fingerprints for these sites from this region
		regionfile = os.path.join(os.path.dirname(__file__), "FPRINT", "fprint_{0}.nc".format(thisRegion))
		regionfp = da.from_array(AssignFP(regionfile, site_lats, site_lons), chunks=chunksize)

		# Multiply the fingerprints and the projections and add them to the running total
		# over the regions
		local_sl += np.multiply.outer(gicsamps[:,i,:], regionfp)

	# Define the missing value for the netCDF files
	nc_missing_value = np.iinfo(np.int16).min

	# Create the xarray data structures for the localized projections
	ncvar_attributes = {"description": "Local SLR contributions from glaciers according to GMIP2 emulated workflow",
			"history": "Created " + time.ctime(time.time()),
			"source": "SLR Framework: AR5 workflow",
			"scenario": scenario,
			"baseyear": baseyear}

	glac_out = xr.Dataset({"sea_level_change": (("samples", "years", "locations"), local_sl, {"units":"mm", "missing_value":nc_missing_value}),
							"lat": (("locations"), site_lats),
							"lon": (("locations"), site_lons)},
		coords={"years": targyears, "locations": site_ids, "samples": np.arange(nsamps)}, attrs=ncvar_attributes)

	glac_out.to_netcdf("{0}_localsl.nc".format(pipeline_id), encoding={"sea_level_change": {"dtype": "i2", "zlib": True, "complevel":4, "_FillValue": nc_missing_value}})




	return(None)
def ar5_postprocess_glaciers(locationfilename, pipeline_id):

    # Read in the global projections
    projfile = "{}_projections.pkl".format(pipeline_id)
    try:
        f = open(projfile, 'rb')
    except:
        print("Cannot open projfile\n")
        sys.exit(1)

    # Extract the projection data from the file
    my_data = pickle.load(f)
    gicsamps = my_data["gicsamps"]
    glac_region_names = my_data["glac_region_names"]
    data_years = my_data["data_years"]
    f.close()

    # Read in the configuration information
    configfile = "{}_data.pkl".format(pipeline_id)
    try:
        f = open(configfile, 'rb')
    except:
        print("Cannot open configfile\n")
        sys.exit(1)

    # Extract the configuration data
    my_data = pickle.load(f)
    scenario = my_data["scenario"]
    #include_models = my_data['include_models']
    #include_scenarios = my_data['include_scenarios']
    #nmodels = len(include_models)
    f.close()

    # Produce the included model string
    #model_string_pieces = ["{0}-{1}".format(include_models[x], include_scenarios[x]) for x in np.arange(nmodels)]
    #model_string = "Models and scenarios included: " + ", ".join(model_string_pieces)
    model_string = ""

    # Load the site locations
    locationfile = os.path.join(os.path.dirname(__file__), locationfilename)
    (_, site_ids, site_lats, site_lons) = ReadLocationFile(locationfile)

    # Initialize variable to hold the localized projections
    (nsamps, nregions, ntimes) = gicsamps.shape
    nsites = len(site_ids)
    local_sl = np.full((nsites, nsamps, ntimes), 0.0)

    # Loop through the GIC regions
    for i in np.arange(0, nregions):

        # Get the fingerprint file name for this region
        thisRegion = glac_region_names[i]

        # Get the fingerprints for these sites from this region
        regionfile = os.path.join(os.path.dirname(__file__), "FPRINT",
                                  "fprint_{0}.nc".format(thisRegion))
        regionfp = AssignFP(regionfile, site_lats, site_lons)

        # Multiply the fingerprints and the projections and add them to the running total
        # over the regions
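        # The outer product has shape (samples, years, sites); the transpose reorders
        # it to (sites, samples, years) to match local_sl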
        local_sl += np.transpose(
            np.multiply.outer(gicsamps[:, i, :], regionfp), (2, 0, 1))

    # Calculate the quantiles
    out_q = np.unique(
        np.append(np.linspace(0, 1, 101),
                  (0.001, 0.005, 0.01, 0.05, 0.167, 0.5, 0.833, 0.95, 0.99,
                   0.995, 0.999)))
    nq = len(out_q)
    local_sl_q = np.nanquantile(local_sl, out_q, axis=1)
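    # Quantiles are taken across the samples axis of local_sl (sites, samples, years),
    # giving local_sl_q with shape (quantiles, sites, years) to match the netCDF layout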

    # Write the localized projections to a netcdf file
    rootgrp = Dataset(os.path.join(os.path.dirname(__file__),
                                   "{}_localsl.nc".format(pipeline_id)),
                      "w",
                      format="NETCDF4")

    # Define Dimensions
    site_dim = rootgrp.createDimension("nsites", nsites)
    year_dim = rootgrp.createDimension("years", ntimes)
    q_dim = rootgrp.createDimension("quantiles", nq)

    # Populate dimension variables
    lat_var = rootgrp.createVariable("lat", "f4", ("nsites", ))
    lon_var = rootgrp.createVariable("lon", "f4", ("nsites", ))
    id_var = rootgrp.createVariable("id", "i4", ("nsites", ))
    year_var = rootgrp.createVariable("years", "i4", ("years", ))
    q_var = rootgrp.createVariable("quantiles", "f4", ("quantiles", ))

    # Create a data variable
    localslq = rootgrp.createVariable("localSL_quantiles",
                                      "i2", ("quantiles", "nsites", "years"),
                                      zlib=True,
                                      complevel=4)
    #localslq.scale_factor = 0.1

    # Assign attributes
    rootgrp.description = "Local SLR contributions from glaciers and ice caps according to AR5 glaciers workflow"
    rootgrp.history = "Created " + time.ctime(time.time())
    rootgrp.source = "FACTS: AR5 Glaciers workflow - {0}. ".format(
        scenario) + model_string
    lat_var.units = "Degrees North"
    lon_var.units = "Degrees East"
    localslq.units = "mm"

    # Put the data into the netcdf variables
    lat_var[:] = site_lats
    lon_var[:] = site_lons
    id_var[:] = site_ids
    year_var[:] = data_years
    q_var[:] = out_q
    localslq[:, :, :] = local_sl_q

    # Close the netcdf
    rootgrp.close()
def ipccar6_postprocess_bambericesheet(locationfilename, chunksize,
                                       pipeline_id):

    # Read in the fitted parameters from parfile
    projfile = "{}_projections.pkl".format(pipeline_id)
    try:
        f = open(projfile, 'rb')
    except:
        print("Cannot open projfile\n")
        sys.exit(1)

    # Extract the data from the file
    my_data = pickle.load(f)
    eais_samps = my_data['eais_samps']
    wais_samps = my_data['wais_samps']
    gis_samps = my_data['gis_samps']
    targyears = my_data['years']
    scenario = my_data['scenario']
    baseyear = my_data['baseyear']
    f.close()

    # Load the site locations
    locationfile = os.path.join(os.path.dirname(__file__), locationfilename)
    (_, site_ids, site_lats, site_lons) = ReadLocationFile(locationfile)

    # Get some dimension data from the loaded data structures
    nsamps = eais_samps.shape[0]
    nyears = len(targyears)
    nsites = len(site_ids)

    # Get the fingerprints for all sites from all ice sheets
    fpdir = os.path.join(os.path.dirname(__file__), "FPRINT")
    gisfp = da.array(
        AssignFP(os.path.join(fpdir, "fprint_gis.nc"), site_lats, site_lons))
    waisfp = da.array(
        AssignFP(os.path.join(fpdir, "fprint_wais.nc"), site_lats, site_lons))
    eaisfp = da.array(
        AssignFP(os.path.join(fpdir, "fprint_eais.nc"), site_lats, site_lons))

    # Rechunk the fingerprints for memory
    gisfp = gisfp.rechunk(chunksize)
    waisfp = waisfp.rechunk(chunksize)
    eaisfp = eaisfp.rechunk(chunksize)

    # Apply the fingerprints to the projections
    gissl = np.multiply.outer(gis_samps, gisfp)
    waissl = np.multiply.outer(wais_samps, waisfp)
    eaissl = np.multiply.outer(eais_samps, eaisfp)

    # Add up the east and west components for AIS total
    aissl = waissl + eaissl

    # Define the missing value for the netCDF files
    nc_missing_value = np.iinfo(np.int16).min

    # Create the xarray data structures for the localized projections
    ncvar_attributes = {
        "description":
        "Local SLR contributions from icesheets according to Bamber Icesheet workflow",
        "history": "Created " + time.ctime(time.time()),
        "source": "SLR Framework: Bamber icesheet workflow",
        "scenario": scenario,
        "baseyear": baseyear
    }

    gis_out = xr.Dataset(
        {
            "sea_level_change": (("samples", "years", "locations"), gissl, {
                "units": "mm",
                "missing_value": nc_missing_value
            }),
            "lat": (("locations"), site_lats),
            "lon": (("locations"), site_lons)
        },
        coords={
            "years": targyears,
            "locations": site_ids,
            "samples": np.arange(nsamps)
        },
        attrs=ncvar_attributes)

    wais_out = xr.Dataset(
        {
            "sea_level_change": (("samples", "years", "locations"), waissl, {
                "units": "mm",
                "missing_value": nc_missing_value
            }),
            "lat": (("locations"), site_lats),
            "lon": (("locations"), site_lons)
        },
        coords={
            "years": targyears,
            "locations": site_ids,
            "samples": np.arange(nsamps)
        },
        attrs=ncvar_attributes)

    eais_out = xr.Dataset(
        {
            "sea_level_change": (("samples", "years", "locations"), eaissl, {
                "units": "mm",
                "missing_value": nc_missing_value
            }),
            "lat": (("locations"), site_lats),
            "lon": (("locations"), site_lons)
        },
        coords={
            "years": targyears,
            "locations": site_ids,
            "samples": np.arange(nsamps)
        },
        attrs=ncvar_attributes)

    ais_out = xr.Dataset(
        {
            "sea_level_change": (("samples", "years", "locations"), aissl, {
                "units": "mm",
                "missing_value": nc_missing_value
            }),
            "lat": (("locations"), site_lats),
            "lon": (("locations"), site_lons)
        },
        coords={
            "years": targyears,
            "locations": site_ids,
            "samples": np.arange(nsamps)
        },
        attrs=ncvar_attributes)

    # Write the netcdf output files
    gis_out.to_netcdf("{0}_{1}_localsl.nc".format(pipeline_id, "GIS"),
                      encoding={
                          "sea_level_change": {
                              "dtype": "i2",
                              "zlib": True,
                              "complevel": 4,
                              "_FillValue": nc_missing_value
                          }
                      })
    wais_out.to_netcdf("{0}_{1}_localsl.nc".format(pipeline_id, "WAIS"),
                       encoding={
                           "sea_level_change": {
                               "dtype": "i2",
                               "zlib": True,
                               "complevel": 4,
                               "_FillValue": nc_missing_value
                           }
                       })
    eais_out.to_netcdf("{0}_{1}_localsl.nc".format(pipeline_id, "EAIS"),
                       encoding={
                           "sea_level_change": {
                               "dtype": "i2",
                               "zlib": True,
                               "complevel": 4,
                               "_FillValue": nc_missing_value
                           }
                       })
    ais_out.to_netcdf("{0}_{1}_localsl.nc".format(pipeline_id, "AIS"),
                      encoding={
                          "sea_level_change": {
                              "dtype": "i2",
                              "zlib": True,
                              "complevel": 4,
                              "_FillValue": nc_missing_value
                          }
                      })

    return (None)
def ssp_postprocess_landwaterstorage(locationfilename, pipeline_id):

    # Load the configuration file
    projfile = "{}_projections.pkl".format(pipeline_id)
    try:
        f = open(projfile, 'rb')
    except:
        print("Cannot open projection file {}\n".format(projfile))
        sys.exit(1)

    # Extract the configuration variables
    my_proj = pickle.load(f)
    f.close()

    targyears = my_proj["years"]
    scen = my_proj['scen']
    baseyear = my_proj['baseyear']
    lwssamps = np.transpose(my_proj["lwssamps"])

    # Load the site locations
    locationfile = os.path.join(os.path.dirname(__file__), locationfilename)
    (_, site_ids, site_lats, site_lons) = ReadLocationFile(locationfile)

    # Initialize variable to hold the localized projections
    (nsamps, ntimes) = lwssamps.shape
    nsites = len(site_ids)

    # Apply the fingerprints
    fpfile = os.path.join(os.path.dirname(__file__),
                          "REL_GROUNDWATER_NOMASK.nc")
    fpsites = AssignFP(fpfile, site_lats, site_lons)
    local_sl = lwssamps[np.newaxis, :, :] * fpsites[:, np.newaxis, np.newaxis]

    # Calculate the quantiles
    out_q = np.unique(
        np.append(np.linspace(0, 1, 101),
                  (0.001, 0.005, 0.01, 0.05, 0.167, 0.5, 0.833, 0.95, 0.99,
                   0.995, 0.999)))
    nq = len(out_q)
    local_sl_q = np.nanquantile(local_sl, out_q, axis=1)

    # Write the localized projections to a netcdf file
    rootgrp = Dataset(os.path.join(os.path.dirname(__file__),
                                   "{}_localsl.nc".format(pipeline_id)),
                      "w",
                      format="NETCDF4")

    # Define Dimensions
    site_dim = rootgrp.createDimension("nsites", nsites)
    year_dim = rootgrp.createDimension("years", ntimes)
    q_dim = rootgrp.createDimension("quantiles", nq)

    # Populate dimension variables
    lat_var = rootgrp.createVariable("lat", "f4", ("nsites", ))
    lon_var = rootgrp.createVariable("lon", "f4", ("nsites", ))
    id_var = rootgrp.createVariable("id", "i4", ("nsites", ))
    year_var = rootgrp.createVariable("years", "i4", ("years", ))
    q_var = rootgrp.createVariable("quantiles", "f4", ("quantiles", ))

    # Create a data variable
    localslq = rootgrp.createVariable("localSL_quantiles",
                                      "i2", ("quantiles", "nsites", "years"),
                                      zlib=True,
                                      complevel=4)
    #localslq.scale_factor = 0.1

    # Assign attributes
    rootgrp.description = "Local SLR contributions from land water storage from the SSP module set"
    rootgrp.history = "Created " + time.ctime(time.time())
    rootgrp.source = "FACTS: {0}, Scenario: {1}, Baseyear: {2}".format(
        pipeline_id, scen, baseyear)
    lat_var.units = "Degrees North"
    lon_var.units = "Degrees East"
    localslq.units = "mm"

    # Put the data into the netcdf variables
    lat_var[:] = site_lats
    lon_var[:] = site_lons
    id_var[:] = site_ids
    year_var[:] = targyears
    q_var[:] = out_q
    localslq[:, :, :] = local_sl_q

    # Close the netcdf
    rootgrp.close()
def kopp14_postprocess_verticallandmotion(nsamps, rng_seed, baseyear, pyear_start, pyear_end, pyear_step, locationfilename, pipeline_id):

	# Read in the data from the preprocessing stage
	datafile = "{}_data.pkl".format(pipeline_id)
	try:
		f = open(datafile, 'rb')
	except:
		print("Cannot open datafile\n")
		sys.exit(1)
	
	# Extract the data from the file
	my_data = pickle.load(f)
	
	# Extract the relevant data
	names = my_data['names']
	ids = my_data['ids']
	lats = my_data['lats']
	lons = my_data['lons']
	rates = my_data['rates']
	sds = my_data['sds']
	
	# Define the target years
	targyears = np.arange(pyear_start, pyear_end+1, pyear_step)
	targyears = np.union1d(targyears, baseyear)
	
	# Load site locations
	locationfile = os.path.join(os.path.dirname(__file__), locationfilename)
	(_, site_ids, site_lats, site_lons) = ReadLocationFile(locationfile)
	
	# Find the nearest points for the query lats/lons
	site_ids_map = NearestPoints(site_lats, site_lons, lats, lons, tol=None)
	
	# Evenly sample an inverse normal distribution
	np.random.seed(rng_seed)
	x = np.linspace(0,1,nsamps+2)[1:(nsamps+1)]
	norm_inv = norm.ppf(x)
	norm_inv_perm = np.random.permutation(norm_inv)
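	# nsamps evenly spaced probabilities are mapped through the inverse normal CDF and
	# then randomly permuted, giving a fixed set of standard-normal deviates that is
	# reused for every site and year below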
	
	# Output quantiles of interest
	out_q = np.unique(np.append(np.linspace(0,1,101), (0.001, 0.005, 0.01, 0.05, 0.167, 0.50, 0.833, 0.95, 0.99, 0.995, 0.999)))
	nq = len(out_q)
	
	# Initialize variable to hold the samples
	local_sl_q = np.full((nq, len(site_ids), len(targyears)), np.nan)
	
	# Loop over the sites
	nsites = len(site_ids)
	for i in np.arange(0,nsites):
		
		# Skip this site if a match wasn't found
		if site_ids_map[i] is None:
			continue
		
		# This site index
		this_site_ind = site_ids_map[i]
		
		# Loop over the target years
		ntimes = len(targyears)
		for j in np.arange(0,ntimes):
			
			# This target year
			targyear = targyears[j]
		
			# Calculate the samples for this location and time
			GIAproj = rates[this_site_ind] * (targyear - baseyear)
			GIAprojsd = sds[this_site_ind] * (targyear - baseyear)
			these_samps = GIAproj + norm_inv_perm*GIAprojsd
			local_sl_q[:,i,j] = np.quantile(these_samps, out_q)
	
	# Write the localized projections to a netcdf file
	rootgrp = Dataset(os.path.join(os.path.dirname(__file__), "{}_localsl.nc".format(pipeline_id)), "w", format="NETCDF4")

	# Define Dimensions
	site_dim = rootgrp.createDimension("nsites", nsites)
	year_dim = rootgrp.createDimension("years", ntimes)
	q_dim = rootgrp.createDimension("quantiles", nq)

	# Populate dimension variables
	lat_var = rootgrp.createVariable("lat", "f4", ("nsites",))
	lon_var = rootgrp.createVariable("lon", "f4", ("nsites",))
	id_var = rootgrp.createVariable("id", "i4", ("nsites",))
	year_var = rootgrp.createVariable("years", "i4", ("years",))
	q_var = rootgrp.createVariable("quantiles", "f4", ("quantiles",))

	# Create a data variable
	localslq = rootgrp.createVariable("localSL_quantiles", "i2", ("quantiles", "nsites", "years"), zlib=True, complevel=4)
	#localslq.scale_factor = 0.1
	
	# Assign attributes
	rootgrp.description = "Local SLR contributions from vertical land motion according to Kopp 14 workflow"
	rootgrp.history = "Created " + time.ctime(time.time())
	rootgrp.source = "FACTS: {0}, Baseyear: {1}".format(pipeline_id, baseyear)
	lat_var.units = "Degrees North"
	lon_var.units = "Degrees East"
	localslq.units = "mm"

	# Put the data into the netcdf variables
	lat_var[:] = site_lats
	lon_var[:] = site_lons
	id_var[:] = site_ids
	year_var[:] = targyears
	q_var[:] = out_q
	localslq[:,:,:] = local_sl_q

	# Close the netcdf
	rootgrp.close()
def ipccar6_postprocess_gmipemuglaciers(locationfilename, pipeline_id):
	
	# Read in the global projections
	projfile = "{}_projections.pkl".format(pipeline_id)
	try:
		f = open(projfile, 'rb')
	except:
		print("Cannot open projfile\n")
		sys.exit(1)
	
	# Extract the projection data from the file
	my_data = pickle.load(f)
	gicsamps = my_data["gic_samps"]
	targyears = my_data["years"]
	scenario = my_data["scenario"]
	baseyear = my_data["baseyear"]
	f.close()
	
	# Load the fingerprint metadata
	fpfile = os.path.join(os.path.dirname(__file__), "fingerprint_region_map.csv")
	fpmap_data = np.genfromtxt(fpfile, dtype=None, names=True, delimiter=',', encoding=None)
	
	# Extract the data
	fpmapperids = fpmap_data['IceID']
	fpmaps = fpmap_data['FPID']
	
	# Load the site locations	
	locationfile = os.path.join(os.path.dirname(__file__), locationfilename)
	(_, site_ids, site_lats, site_lons) = ReadLocationFile(locationfile)
	
	# Initialize variable to hold the localized projections
	#(nsamps, nregions, ntimes) = gicsamps.shape	# From K14 code. Not consistent with this module.
	(nregions, nsamps, ntimes) = gicsamps.shape
	nsites = len(site_ids)
	local_sl = np.full((nsites, nsamps, ntimes), 0.0) 
	
	# Loop through the GIC regions
	for i in np.arange(0,nregions):
		
		# Get the fingerprint file name for this region
		fp_idx = np.flatnonzero(fpmapperids == i+1)
		thisRegion = fpmaps[fp_idx][0]

		# Get the fingerprints for these sites from this region
		regionfile = os.path.join(os.path.dirname(__file__), "FPRINT", "fprint_{0}.nc".format(thisRegion))
		regionfp = AssignFP(regionfile, site_lats, site_lons)
		
		# Multiply the fingerprints and the projections and add them to the running total
		# over the regions
		local_sl += np.transpose(np.multiply.outer(gicsamps[i,:,:], regionfp), (2,0,1))
	
	# Calculate the quantiles
	out_q = np.unique(np.append(np.linspace(0,1,101), (0.001, 0.005, 0.01, 0.05, 0.167, 0.5, 0.833, 0.95, 0.99, 0.995, 0.999)))
	nq = len(out_q)
	local_sl_q = np.nanquantile(local_sl, out_q, axis=1)
		
	# Write the localized projections to a netcdf file
	rootgrp = Dataset(os.path.join(os.path.dirname(__file__), "{}_localsl.nc".format(pipeline_id)), "w", format="NETCDF4")

	# Define Dimensions
	site_dim = rootgrp.createDimension("nsites", nsites)
	year_dim = rootgrp.createDimension("years", ntimes)
	q_dim = rootgrp.createDimension("quantiles", nq)

	# Populate dimension variables
	lat_var = rootgrp.createVariable("lat", "f4", ("nsites",))
	lon_var = rootgrp.createVariable("lon", "f4", ("nsites",))
	id_var = rootgrp.createVariable("id", "i4", ("nsites",))
	year_var = rootgrp.createVariable("years", "i4", ("years",))
	q_var = rootgrp.createVariable("quantiles", "f4", ("quantiles",))

	# Create a data variable
	localslq = rootgrp.createVariable("localSL_quantiles", "i2", ("quantiles", "nsites", "years"), zlib=True, complevel=4)
	#localslq.scale_factor = 0.1

	# Assign attributes
	rootgrp.description = "Local SLR contributions from glaciers and ice caps according to GMIP2 emulated workflow"
	rootgrp.history = "Created " + time.ctime(time.time())
	rootgrp.source = "FACTS: IPCC AR6 GMIP2 emulated workflow - {0}; Base year {1}".format(scenario, baseyear)
	lat_var.units = "Degrees North"
	lon_var.units = "Degrees East"
	localslq.units = "mm"

	# Put the data into the netcdf variables
	lat_var[:] = site_lats
	lon_var[:] = site_lons
	id_var[:] = site_ids
	year_var[:] = targyears
	q_var[:] = out_q
	localslq[:,:,:] = local_sl_q

	# Close the netcdf
	rootgrp.close()
def ar5_postprocess_glaciers(locationfilename, chunksize, pipeline_id):

	# Read in the global projections
	projfile = "{}_projections.pkl".format(pipeline_id)
	try:
		f = open(projfile, 'rb')
	except:
		print("Cannot open projfile\n")
		sys.exit(1)

	# Extract the projection data from the file
	my_data = pickle.load(f)
	gicsamps = my_data["gicsamps"]
	glac_region_names = my_data["glac_region_names"]
	targyears = my_data["data_years"]
	f.close()

	# Read in the configuration information
	configfile = "{}_data.pkl".format(pipeline_id)
	try:
		f = open(configfile, 'rb')
	except:
		print("Cannot open configfile\n")
		sys.exit(1)

	# Extract the configuration data
	my_data = pickle.load(f)
	scenario = my_data["scenario"]
	baseyear = my_data["startyr"]
	#include_models = my_data['include_models']
	#include_scenarios = my_data['include_scenarios']
	#nmodels = len(include_models)
	f.close()

	# Produce the included model string
	#model_string_pieces = ["{0}-{1}".format(include_models[x], include_scenarios[x]) for x in np.arange(nmodels)]
	#model_string = "Models and scenarios included: " + ", ".join(model_string_pieces)
	model_string = ""

	# Load the site locations
	locationfile = os.path.join(os.path.dirname(__file__), locationfilename)
	(_, site_ids, site_lats, site_lons) = ReadLocationFile(locationfile)

	# Initialize variable to hold the localized projections
	(nsamps, nregions, nyears) = gicsamps.shape
	nsites = len(site_ids)
	#local_sl = da.array(np.full((nsamps, nyears, nsites), 0.0))
	#local_sl = local_sl.rechunk((-1,-1,chunksize))
	local_sl = da.zeros((nsamps, nyears, nsites), chunks=(-1,-1,chunksize))

	# Loop through the GIC regions
	for i in np.arange(0,nregions):

		# Get the fingerprint file name for this region
		thisRegion = glac_region_names[i]

		# Get the fingerprints for these sites from this region
		regionfile = os.path.join(os.path.dirname(__file__), "FPRINT", "fprint_{0}.nc".format(thisRegion))
		regionfp = da.from_array(AssignFP(regionfile, site_lats, site_lons), chunks=chunksize)
		#regionfp = regionfp.rechunk(chunksize)

		# Multiply the fingerprints and the projections and add them to the running total
		# over the regions
		local_sl += np.multiply.outer(gicsamps[:,i,:], regionfp)


	# Define the missing value for the netCDF files
	nc_missing_value = np.iinfo(np.int16).min

	# Create the xarray data structures for the localized projections
	ncvar_attributes = {"description": "Local SLR contributions from glaciers according to AR5 workflow",
			"history": "Created " + time.ctime(time.time()),
			"source": "SLR Framework: AR5 workflow",
			"scenario": scenario,
			"baseyear": baseyear}

	glac_out = xr.Dataset({"sea_level_change": (("samples", "years", "locations"), local_sl, {"units":"mm", "missing_value":nc_missing_value}),
							"lat": (("locations"), site_lats),
							"lon": (("locations"), site_lons)},
		coords={"years": targyears, "locations": site_ids, "samples": np.arange(nsamps)}, attrs=ncvar_attributes)

	glac_out.to_netcdf("{0}_localsl.nc".format(pipeline_id), encoding={"sea_level_change": {"dtype": "i2", "zlib": True, "complevel":4, "_FillValue": nc_missing_value}})



	return(None)
def kopp14_postprocess_verticallandmotion(nsamps, rng_seed, baseyear,
                                          pyear_start, pyear_end, pyear_step,
                                          locationfilename, chunksize,
                                          pipeline_id):

    # Read in the data from the preprocessing stage
    datafile = "{}_data.pkl".format(pipeline_id)
    try:
        f = open(datafile, 'rb')
    except:
        print("Cannot open datafile\n")
        sys.exit(1)

    # Extract the data from the file
    my_data = pickle.load(f)

    # Extract the relevant data
    names = my_data['names']
    ids = my_data['ids']
    lats = my_data['lats']
    lons = my_data['lons']
    rates = my_data['rates']
    sds = my_data['sds']

    # Define the target years
    targyears = np.arange(pyear_start, pyear_end + 1, pyear_step)
    targyears = np.union1d(targyears, baseyear)

    # Load site locations
    locationfile = os.path.join(os.path.dirname(__file__), locationfilename)
    (_, site_ids, site_lats, site_lons) = ReadLocationFile(locationfile)

    # Dimension variables
    nyears = len(targyears)
    nsites = len(site_ids)

    # Find the nearest points for the query lats/lons
    site_ids_map = np.array(
        NearestPoints(site_lats, site_lons, lats, lons, tol=None))

    # Evenly sample an inverse normal distribution
    np.random.seed(rng_seed)
    x = np.linspace(0, 1, nsamps + 2)[1:(nsamps + 1)]
    norm_inv = norm.ppf(x)
    norm_inv_perm = np.random.permutation(norm_inv)

    # Missing value for netcdf file
    nc_missing_value = np.iinfo(np.int16).min

    # Get the rates and sds for the locations of interest
    site_rates = da.array([rates[x] for x in site_ids_map])
    site_sds = da.array([sds[x] for x in site_ids_map])

    # Rechunk the rates and sds
    site_rates = site_rates.rechunk(chunksize)
    site_sds = site_sds.rechunk(chunksize)

    # Generate the projected means and standard deviations
    GIAproj = np.multiply.outer(targyears - baseyear, site_rates)
    GIAprojsd = np.multiply.outer(targyears - baseyear, site_sds)

    # Produce the samples from the means and standard deviations
    local_sl = GIAproj + np.multiply.outer(norm_inv_perm, GIAprojsd)
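    # GIAproj and GIAprojsd have shape (years, locations); the outer product with the
    # (samples,) normal deviates yields (samples, years, locations), matching the
    # Dataset dimensions below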

    # Create the xarray data structures for the localized projections
    ncvar_attributes = {
        "description":
        "Local SLR contributions from vertical land motion according to Kopp 2014 workflow",
        "history": "Created " + time.ctime(time.time()),
        "source": "SLR Framework: Kopp 2014 workflow",
        "scenario": "NA",
        "baseyear": baseyear
    }

    vlm_out = xr.Dataset(
        {
            "sea_level_change": (("samples", "years", "locations"), local_sl, {
                "units": "mm",
                "missing_value": nc_missing_value
            }),
            "lat": (("locations"), site_lats),
            "lon": (("locations"), site_lons)
        },
        coords={
            "years": targyears,
            "locations": site_ids,
            "samples": np.arange(nsamps)
        },
        attrs=ncvar_attributes)

    # Write the netcdf output file
    vlm_out.to_netcdf("{0}_localsl.nc".format(pipeline_id),
                      encoding={
                          "sea_level_change": {
                              "dtype": "i2",
                              "zlib": True,
                              "complevel": 4,
                              "_FillValue": nc_missing_value
                          }
                      })

    return (None)
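# A minimal usage sketch, not part of the original module: the location file and
# pipeline_id are hypothetical, and the preprocessing stage is assumed to have already
# written "vlm_k14_data.pkl" before this postprocessing step is run.
if __name__ == "__main__":
    kopp14_postprocess_verticallandmotion(nsamps=2000, rng_seed=1234, baseyear=2005,
                                          pyear_start=2020, pyear_end=2100,
                                          pyear_step=10,
                                          locationfilename="coastal_sites.lst",
                                          chunksize=50, pipeline_id="vlm_k14")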