Example #1
        if not os.path.exists(output_path):
            os.makedirs(output_path)

        print(input_path)

        # NOTE: all data fall into the `historical` argument for this downscaling.
        # Because we are not using a climatology to generate anomalies, we generate
        # deltas of min/max from the mean of the same variable group (i.e. tas/tasmax/tasmin).
        # list files for this set of downscaling -- one per folder
        fn, = glob.glob(os.path.join(input_path, '*.nc'))
        historical = downscale.Dataset(fn,
                                       variable,
                                       model,
                                       scenario,
                                       project=project,
                                       units=units,
                                       metric=metric,
                                       begin=begin,
                                       end=end)
        # mean data -- hacky...
        mean_fn, = glob.glob(
            os.path.join(input_path.replace(variable, mean_variable), '*.nc'))
        mean_ds = downscale.Dataset(mean_fn,
                                    mean_variable,
                                    model,
                                    scenario,
                                    project=project,
                                    units=units,
                                    metric=metric,
                                    begin=begin,
                                    end=end)
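
        # What typically follows in this min/max workflow (compare Example #6 below): the
        # min/max Dataset and the mean Dataset are handed together to DeltaDownscaleMinMax.
        # A hedged sketch only -- baseline, clim_begin and clim_end are assumed to be
        # defined earlier in the script and are not part of this snippet:
        # ar5 = DeltaDownscaleMinMax(baseline=baseline, clim_begin=clim_begin, clim_end=clim_end,
        #                            historical=historical, future=None,
        #                            mean_ds=mean_ds, mean_variable=mean_variable)
        # ar5.downscale(output_dir=output_path)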
Example #2
		filelist = [ i for i in filelist if '_14_' not in i ] # remove the annual _14_ file.
		baseline = downscale.Baseline( filelist )
		
		input_path = os.path.join( base_path, model, scenario, variable )
		output_path = os.path.join( output_dir, model, scenario, variable )

		if not os.path.exists( output_path ):
			os.makedirs( output_path )

		print( input_path )

		# list files for this set of downscaling -- one per folder
		fn, = glob.glob( os.path.join( input_path, '*.nc' ) )

		if 'historical' in scenario:
			historical = downscale.Dataset( fn, variable, model, scenario, project=project, units=units, metric=metric, begin=1860, end=2005 )
			future = None # no need for futures here....
		else:
			# get the historical data for anomalies
			historical_fn, = glob.glob( os.path.join( os.path.dirname( fn ).replace( scenario, 'historical' ), '*.nc' ) )
			historical = downscale.Dataset( historical_fn, variable, model, scenario, project=project, units=units, metric=metric, begin=1860, end=2005 )
			future = downscale.Dataset( fn, variable, model, scenario, project=project, units=units, metric=metric, begin=2006, end=2100 )
		
		# convert from Kelvin to Celsius
		if variable != 'pr':
			if historical:
				historical.ds[ variable ] = historical.ds[ variable ] - 273.15
				historical.ds[ variable ][ 'units' ] = units
			
			if future:
				future.ds[ variable ] = future.ds[ variable ] - 273.15
				future.ds[ variable ][ 'units' ] = units
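
		# The Kelvin -> Celsius block above is duplicated for the historical and future
		# datasets; a minimal sketch of factoring it out, assuming `.ds` is the xarray
		# Dataset object used throughout these examples (the helper name is illustrative only):
		def to_celsius( dset, variable, units ):
			if dset is not None and variable != 'pr':
				dset.ds[ variable ] = dset.ds[ variable ] - 273.15
				dset.ds[ variable ][ 'units' ] = units
			return dset
		# usage, replacing the two blocks above:
		# historical = to_celsius( historical, variable, units )
		# future = to_celsius( future, variable, units )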
Example #3
        if not os.path.exists(output_path):
            os.makedirs(output_path)

        print(input_path)

        # list files for this set of downscaling -- one per folder
        fn, = glob.glob(os.path.join(input_path, '*.nc'))

        if 'historical' in scenario:
            historical = downscale.Dataset(fn,
                                           variable,
                                           model,
                                           scenario,
                                           project=project,
                                           units=units,
                                           metric=metric,
                                           begin=1900,
                                           end=2005,
                                           level_name=level_name,
                                           level=level)
            future = None
        else:
            # get the historical data for anomalies
            historical_fn, = glob.glob(
                os.path.join(
                    os.path.dirname(fn).replace(scenario, 'historical'),
                    '*.nc'))
            historical = downscale.Dataset(historical_fn,
                                           variable,
                                           model,
                                           scenario,
                                           project=project,
                                           units=units,
                                           metric=metric,
                                           begin=1900,
                                           end=2005,
                                           level_name=level_name,
                                           level=level)
            # years for the future run assumed to follow the 2006-2100 pattern in Example #2
            future = downscale.Dataset(fn,
                                       variable,
                                       model,
                                       scenario,
                                       project=project,
                                       units=units,
                                       metric=metric,
                                       begin=2006,
                                       end=2100,
                                       level_name=level_name,
                                       level=level)
Example #4
# SETUP BASELINE
clim_path = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_october_final/cru_cl20/cld/akcan'
filelist = glob.glob(os.path.join(clim_path, '*.tif'))
baseline = downscale.Baseline(filelist)

# SETUP DATASET
output_dir = '/workspace/Shared/Tech_Projects/ESGF_Data_Access/project_data/test'
future_fn = '/workspace/Shared/Tech_Projects/ESGF_Data_Access/project_data/snap_prepped_data/IPSL-CM5A-LR/hur/hur_Amon_IPSL-CM5A-LR_rcp26_r1i1p1_200601_210012.nc'
historical_fn = '/workspace/Shared/Tech_Projects/ESGF_Data_Access/project_data/snap_prepped_data/IPSL-CM5A-LR/hur/hur_Amon_IPSL-CM5A-LR_historical_r1i1p1_185001_200512.nc'
variable = 'hur'
model = 'IPSL-CM5A-LR'
scenario = 'rcp26'
historical = downscale.Dataset(historical_fn,
                               variable,
                               model,
                               scenario,
                               units=None)
future = downscale.Dataset(future_fn, variable, model, scenario, units=None)

# DOWNSCALE
mask = rasterio.open(baseline.filelist[0]).read_masks(1)
clim_begin = '1961'
clim_end = '1990'
ar5 = downscale.DeltaDownscale(baseline, clim_begin, clim_end, historical, future,
                               metric='mean', ds_type='absolute',
                               level=1000, level_name='plev')  # add in the mask!
ar5.downscale(output_dir=output_dir)
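
# NOTE: `mask` is computed above but never passed -- the "add in the mask!" comment is a
# leftover TODO. Example #6 below passes it to DeltaDownscaleMinMax as mask=mask, mask_value=0;
# a hedged sketch of the same call with the mask wired in (assuming downscale.DeltaDownscale
# accepts the same mask keywords):
# ar5 = downscale.DeltaDownscale(baseline, clim_begin, clim_end, historical, future,
#                                metric='mean', ds_type='absolute',
#                                level=1000, level_name='plev',
#                                mask=mask, mask_value=0)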

# CRU historical
output_dir = '/workspace/Shared/Tech_Projects/ESGF_Data_Access/project_data/test'
historical_fn = '/Data/Base_Data/Climate/World/CRU_grids/CRU_TS323/cru_ts3.23.1901.2014.cld.dat.nc'
Example #5
    import downscale

    # minimum required arguments
    ar5_modeled = '/workspace/Shared/Tech_Projects/ESGF_Data_Access/project_data/data/prepped/clt_prepped/IPSL-CM5A-LR/clt/clt_Amon_IPSL-CM5A-LR_rcp26_r1i1p1_200601_210012.nc'
    ar5_historical = '/workspace/Shared/Tech_Projects/ESGF_Data_Access/project_data/data/prepped/clt_prepped/IPSL-CM5A-LR/clt/clt_Amon_IPSL-CM5A-LR_historical_r1i1p1_185001_200512.nc'
    clim_path = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_october_final/cru_cl20/cld/akcan'
    template_raster_fn = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/templates/tas_mean_C_AR5_GFDL-CM3_historical_01_1860.tif'
    base_path = '/atlas_scratch/malindgren/CMIP5/TEST_AR5'

    # run
    # down = DownscaleAR5.DownscaleAR5( ar5_modeled, ar5_historical, base_path, clim_path, template_raster_fn=template_raster_fn, ncores=32 ) #, climatology_begin, climatology_end, plev, absolute, metric, ncores )
    # output = down.downscale_ar5_ts()
    down = downscale.Dataset(
        ar5_modeled,
        ar5_historical,
        base_path,
        clim_path,
        template_raster_fn=template_raster_fn,
        ncores=32
    )  #, climatology_begin, climatology_end, plev, absolute, metric, ncores )
    output = down.downscale_ar5_ts()

# CRU
if __name__ == '__main__':

    # import modules
    from downscale import DownscaleCRU

    # example of a post_downscale_function -- pass it in to DownscaleCRU()
    def clamp_vals(x):
        ''' clamp the values following the relative humidity downscaling '''
        x[(x > 100) & (x < 500)] = 95
        return x
Example #6
	baseline = downscale.Baseline( filelist )

	# DOWNSCALE
	mask = rasterio.open( baseline.filelist[0] ).read_masks( 1 )

	# make round/trunc function for post_downscale_function
	if variable == 'pr' or variable == 'pre':
		rounder = np.rint
		downscaling_operation = 'mult'
	else:
		rounder = partial( np.around, decimals=1 )
		downscaling_operation = 'add'

	def round_it( arr ):
		return rounder( arr )

	historical = Dataset( cru_ts, variable, model, scenario, project, units, metric, 
							method='linear', ncpus=ncpus, interp=True )

	mean_fn = cru_ts.replace( variable, mean_variable_cru )
	mean_ds = downscale.Dataset( mean_fn, mean_variable_cru, model, scenario, project=project, units=units, metric=metric, begin=begin, end=end, interp=True )

	# NOTE: for CRU we pass interp=True so values are interpolated across space first when creating the Dataset()
	ar5 = DeltaDownscaleMinMax( baseline=baseline, clim_begin=clim_begin, clim_end=clim_end, historical=historical, future=None,
				downscaling_operation=downscaling_operation, mask=mask, mask_value=0, ncpus=ncpus,
				src_crs={'init':'epsg:4326'}, src_nodata=None, dst_nodata=None,
				post_downscale_function=round_it, varname=out_varname, modelname=None, anom=anom, 
					mean_ds=mean_ds, mean_variable=mean_variable_cru, interp=interp )

	ar5.downscale( output_dir=output_path )
Example #7
    input_path = os.path.join(base_dir, model, scenario, variable)
    output_path = os.path.join(output_dir, model, scenario, variable)

    if not os.path.exists(output_path):
        os.makedirs(output_path)

    print(input_path)

    # list files for this set of downscaling -- one per folder
    fn, = glob.glob(os.path.join(input_path, '*.nc'))

    if 'historical' in scenario:
        historical = downscale.Dataset(fn,
                                       variable,
                                       model,
                                       scenario,
                                       project=project,
                                       units=units)
        future = None  # no need for futures here....
    else:
        # get the historical data for anomalies
        historical_fn, = glob.glob(
            os.path.join(
                os.path.dirname(fn).replace(scenario, 'historical'), '*.nc'))
        historical = downscale.Dataset(historical_fn,
                                       variable,
                                       model,
                                       scenario,
                                       project=project,
                                       units=units)
        future = downscale.Dataset(fn,
                                   variable,
                                   model,
                                   scenario,
                                   project=project,
                                   units=units)
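
    # What typically follows in these scripts (compare Examples #2 and #4): the Dataset
    # objects are paired with a downscale.Baseline and run through DeltaDownscale. A hedged
    # sketch only -- baseline, clim_begin and clim_end are assumed to be defined earlier
    # in the script:
    # ar5 = downscale.DeltaDownscale(baseline, clim_begin, clim_end, historical, future)
    # ar5.downscale(output_dir=output_path)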