def process_custom_climate_data(gdir):
    """Processes and writes the climate data from a user-defined climate file.

    The input file must have a specific format (see
    oggm-sample-data/test-files/histalp_merged_hef.nc for an example).

    Uses caching for faster retrieval.

    This is the way OGGM does it for the Alps (HISTALP).

    Parameters
    ----------
    gdir : the glacier directory to process (must expose ``cenlon``,
        ``cenlat``, ``write_monthly_climate_file`` and ``write_pickle``)

    Raises
    ------
    IOError
        if ``cfg.PATHS['climate_file']`` is missing or does not exist
    ValueError
        if the (subsetted) time series is not a whole number of years
    """
    if not (('climate_file' in cfg.PATHS) and
            os.path.exists(cfg.PATHS['climate_file'])):
        raise IOError('Custom climate file not found')

    # read the file
    fpath = cfg.PATHS['climate_file']
    nc_ts = salem.GeoNetcdf(fpath)

    # set temporal subset for the ts data (hydro years)
    # hydrological years run Oct y0 .. Sep y1, hence the 10-01 / 09-01 bounds
    yrs = nc_ts.time.year
    y0, y1 = yrs[0], yrs[-1]
    nc_ts.set_period(t0='{}-10-01'.format(y0), t1='{}-09-01'.format(y1))
    time = nc_ts.time
    ny, r = divmod(len(time), 12)
    if r != 0:
        raise ValueError('Climate data should be N full years exclusively')

    # Units
    # NOTE(review): asserts are stripped under ``python -O`` — acceptable here
    # as a developer-facing sanity check on the input file's metadata
    assert nc_ts._nc.variables['hgt'].units.lower() in \
        ['m', 'meters', 'meter', 'metres', 'metre']
    assert nc_ts._nc.variables['temp'].units.lower() in \
        ['degc', 'degrees', 'degree', 'c']
    assert nc_ts._nc.variables['prcp'].units.lower() in \
        ['kg m-2', 'l m-2', 'mm', 'millimeters', 'millimeter']

    # geoloc
    lon = nc_ts._nc.variables['lon'][:]
    lat = nc_ts._nc.variables['lat'][:]

    # Gradient defaults
    use_grad = cfg.PARAMS['temp_use_local_gradient']
    def_grad = cfg.PARAMS['temp_default_gradient']
    g_minmax = cfg.PARAMS['temp_local_gradient_bounds']

    # nearest grid point to the glacier centroid
    ilon = np.argmin(np.abs(lon - gdir.cenlon))
    ilat = np.argmin(np.abs(lat - gdir.cenlat))
    ref_pix_lon = lon[ilon]
    ref_pix_lat = lat[ilat]
    # joblib-cached read of prcp/temp/gradient/height at that grid point
    iprcp, itemp, igrad, ihgt = utils.joblib_read_climate(
        fpath, ilon, ilat, def_grad, g_minmax, use_grad)
    gdir.write_monthly_climate_file(time, iprcp, itemp, igrad, ihgt,
                                    ref_pix_lon, ref_pix_lat)
    # metadata
    # hydro_yr_0 is y0 + 1 because the first hydro year ends in year y0 + 1
    out = {'climate_source': fpath, 'hydro_yr_0': y0 + 1, 'hydro_yr_1': y1}
    gdir.write_pickle(out, 'climate_info')
def process_custom_climate_data(gdir):
    """Processes and writes the climate data from a user-defined climate file.

    The input file must have a specific format (see
    oggm-sample-data/test-files/histalp_merged_hef.nc for an example).

    Uses caching for faster retrieval.

    This is the way OGGM does it for the Alps (HISTALP).
    """
    # Guard clause: a usable custom climate file is mandatory
    has_file = 'climate_file' in cfg.PATHS
    if not (has_file and os.path.exists(cfg.PATHS['climate_file'])):
        raise IOError('Custom climate file not found')

    # Open the user-supplied file
    fpath = cfg.PATHS['climate_file']
    nc_ts = salem.GeoNetcdf(fpath)

    # Restrict the series to full hydrological years (Oct .. Sep)
    years = nc_ts.time.year
    y0 = years[0]
    y1 = years[-1]
    nc_ts.set_period(t0='{}-10-01'.format(y0), t1='{}-09-01'.format(y1))
    time = nc_ts.time
    if len(time) % 12 != 0:
        raise ValueError('Climate data should be N full years exclusively')

    # Sanity-check the units declared in the file
    ncv = nc_ts._nc.variables
    assert ncv['hgt'].units.lower() in ('m', 'meters', 'meter',
                                        'metres', 'metre')
    assert ncv['temp'].units.lower() in ('degc', 'degrees', 'degree', 'c')
    assert ncv['prcp'].units.lower() in ('kg m-2', 'l m-2', 'mm',
                                         'millimeters', 'millimeter')

    # Coordinates of the climate grid
    lon = ncv['lon'][:]
    lat = ncv['lat'][:]

    # Temperature-gradient configuration
    use_grad = cfg.PARAMS['temp_use_local_gradient']
    def_grad = cfg.PARAMS['temp_default_gradient']
    g_minmax = cfg.PARAMS['temp_local_gradient_bounds']

    # Pick the grid point closest to the glacier centroid
    lon_idx = np.argmin(np.abs(lon - gdir.cenlon))
    lat_idx = np.argmin(np.abs(lat - gdir.cenlat))
    ref_pix_lon = lon[lon_idx]
    ref_pix_lat = lat[lat_idx]

    # Cached extraction of the point time series, then write it out
    iprcp, itemp, igrad, ihgt = utils.joblib_read_climate(
        fpath, lon_idx, lat_idx, def_grad, g_minmax, use_grad)
    gdir.write_monthly_climate_file(time, iprcp, itemp, igrad, ihgt,
                                    ref_pix_lon, ref_pix_lat)

    # Persist provenance metadata alongside the climate file
    out = {'climate_source': fpath, 'hydro_yr_0': y0+1, 'hydro_yr_1': y1}
    gdir.write_pickle(out, 'climate_info')
def process_histalp_nonparallel(gdirs, fpath=None):
    """This is the way OGGM used to do it (deprecated).

    It requires an input file with a specific format, and uses lazy
    optimisation (computing time dependant gradients can be slow)

    Parameters
    ----------
    gdirs : list of glacier directories to write climate data for
    fpath : path to the climate file; defaults to
        ``cfg.PATHS['climate_file']`` when not given

    Raises
    ------
    IOError
        if no climate file could be located
    ValueError
        if the file does not contain an integer number of full years
    """
    # Did the user specify a specific climate data file?
    if fpath is None:
        if 'climate_file' in cfg.PATHS:
            fpath = cfg.PATHS['climate_file']
    # BUG FIX: previously ``os.path.exists(fpath)`` was called even when
    # fpath was still None (no user path, no config entry), which raised
    # TypeError instead of the intended IOError below.
    if fpath is None or not os.path.exists(fpath):
        raise IOError('Custom climate file not found')

    log.info('process_histalp_nonparallel')

    # read the file and data entirely (faster than many I/O)
    with netCDF4.Dataset(fpath, mode='r') as nc:
        lon = nc.variables['lon'][:]
        lat = nc.variables['lat'][:]

        # Time
        time = nc.variables['time']
        time = netCDF4.num2date(time[:], time.units)
        ny, r = divmod(len(time), 12)
        if r != 0:
            raise ValueError('Climate data should be N full years '
                             'exclusively')
        y0, y1 = time[0].year, time[-1].year

        # Units sanity check (developer-facing; stripped under ``-O``)
        assert nc.variables['hgt'].units == 'm'
        assert nc.variables['temp'].units == 'degC'
        assert nc.variables['prcp'].units == 'kg m-2'

    # Gradient defaults
    use_grad = cfg.PARAMS['temp_use_local_gradient']
    def_grad = cfg.PARAMS['temp_default_gradient']
    g_minmax = cfg.PARAMS['temp_local_gradient_bounds']

    for gdir in gdirs:
        # nearest grid point to each glacier centroid
        ilon = np.argmin(np.abs(lon - gdir.cenlon))
        ilat = np.argmin(np.abs(lat - gdir.cenlat))
        ref_pix_lon = lon[ilon]
        ref_pix_lat = lat[ilat]
        # joblib-cached point extraction (reads fpath itself, not ``nc``)
        iprcp, itemp, igrad, ihgt = utils.joblib_read_climate(
            fpath, ilon, ilat, def_grad, g_minmax, use_grad)
        gdir.write_monthly_climate_file(time, iprcp, itemp, igrad, ihgt,
                                        ref_pix_lon, ref_pix_lat)
        # metadata: hydro year 0 ends in the calendar year after y0
        out = {'climate_source': fpath, 'hydro_yr_0': y0+1, 'hydro_yr_1': y1}
        gdir.write_pickle(out, 'climate_info')
def distribute_climate_data(gdirs):
    """Reads the Histalp climate data and distributes to each glacier.

    Not to be multi-processed.

    Parameters:
    -----------
    gdirs: the list of GlacierDir objects where to write the data.

    I/O
    ---
    Generates a NetCDF file in the root glacier directory
    (climate_monthly.nc) It contains the timeseries of temperature,
    temperature gradient, and precipitation at the nearest grid point. The
    climate data reference height is provided as global attribute.
    """
    log.info('Distribute climate data')

    # read the file and data entirely (faster than many I/O)
    # FIX: cfg.paths / cfg.params were lowercase here, inconsistent with the
    # cfg.PATHS / cfg.PARAMS used everywhere else in this module.
    ncpath = cfg.PATHS['histalp_file']
    # FIX: a with-statement guarantees the dataset is closed even when the
    # full-years check below raises (the old code leaked the handle).
    with netCDF4.Dataset(ncpath, mode='r') as nc:
        lon = nc.variables['lon'][:]
        lat = nc.variables['lat'][:]

        # Time
        time = nc.variables['time']
        time = netCDF4.num2date(time[:], time.units)
        ny, r = divmod(len(time), 12)
        if r != 0:
            raise ValueError('Climate data should be N full years '
                             'exclusively')

        # Units sanity check (developer-facing; stripped under ``-O``)
        assert nc.variables['hgt'].units == 'm'
        assert nc.variables['temp'].units == 'degC'
        assert nc.variables['prcp'].units == 'kg m-2'

    # Gradient defaults
    # NOTE(review): hard-coded standard lapse rate; other functions in this
    # module read cfg.PARAMS['temp_default_gradient'] instead — confirm.
    def_grad = -0.0065
    g_minmax = cfg.PARAMS['temp_local_gradient_bounds']
    sf = cfg.PARAMS['prcp_scaling_factor']

    for gdir in gdirs:
        # nearest grid point to each glacier centroid
        ilon = np.argmin(np.abs(lon - gdir.cenlon))
        ilat = np.argmin(np.abs(lat - gdir.cenlat))
        # joblib-cached point extraction (reads ncpath itself, not ``nc``)
        iprcp, itemp, igrad, ihgt = utils.joblib_read_climate(
            ncpath, ilon, ilat, def_grad, g_minmax, sf)
        gdir.create_monthly_climate_file(time, iprcp, itemp, igrad, ihgt)
def _distribute_histalp_syle(gdirs, fpath):
    """This is the way OGGM does it for the Alps.

    It requires an input file with a specific format, and uses lazy
    optimisation (computing time dependant gradients can be slow)

    Parameters
    ----------
    gdirs : list of glacier directories to write climate data for
    fpath : path to the HISTALP-style climate file

    Raises
    ------
    ValueError
        if the file does not contain an integer number of full years
    """
    # read the file and data entirely (faster than many I/O)
    # FIX: a with-statement guarantees the dataset is closed even when the
    # full-years check below raises — the old code only called nc.close()
    # at the very end, leaking the handle on the error path.
    with netCDF4.Dataset(fpath, mode='r') as nc:
        lon = nc.variables['lon'][:]
        lat = nc.variables['lat'][:]

        # Time
        time = nc.variables['time']
        time = netCDF4.num2date(time[:], time.units)
        ny, r = divmod(len(time), 12)
        if r != 0:
            raise ValueError('Climate data should be N full years '
                             'exclusively')

        # Units sanity check (developer-facing; stripped under ``-O``)
        assert nc.variables['hgt'].units == 'm'
        assert nc.variables['temp'].units == 'degC'
        assert nc.variables['prcp'].units == 'kg m-2'

    # Gradient defaults
    use_grad = cfg.PARAMS['temp_use_local_gradient']
    def_grad = cfg.PARAMS['temp_default_gradient']
    g_minmax = cfg.PARAMS['temp_local_gradient_bounds']
    sf = cfg.PARAMS['prcp_scaling_factor']

    for gdir in gdirs:
        # nearest grid point to each glacier centroid
        ilon = np.argmin(np.abs(lon - gdir.cenlon))
        ilat = np.argmin(np.abs(lat - gdir.cenlat))
        # joblib-cached point extraction (reads fpath itself, not ``nc``)
        iprcp, itemp, igrad, ihgt = utils.joblib_read_climate(
            fpath, ilon, ilat, def_grad, g_minmax, sf, use_grad)
        gdir.write_monthly_climate_file(time, iprcp, itemp, igrad, ihgt)
def _distribute_histalp_syle(gdirs, fpath):
    """This is the way OGGM does it for the Alps.

    It requires an input file with a specific format, and uses lazy
    optimisation (computing time dependant gradients can be slow)
    """
    # Load coordinates and time axis up front (faster than many small reads)
    nc = netCDF4.Dataset(fpath, mode='r')
    lon = nc.variables['lon'][:]
    lat = nc.variables['lat'][:]

    # Decode the time axis and require an integer number of years
    time_var = nc.variables['time']
    time = netCDF4.num2date(time_var[:], time_var.units)
    ny, r = divmod(len(time), 12)
    if r != 0:
        raise ValueError('Climate data should be N full years exclusively')

    # Table-driven sanity check of the units declared in the file
    for varname, expected in (('hgt', 'm'),
                              ('temp', 'degC'),
                              ('prcp', 'kg m-2')):
        assert nc.variables[varname].units == expected

    # Temperature-gradient and precipitation-scaling configuration
    use_grad = cfg.PARAMS['temp_use_local_gradient']
    def_grad = cfg.PARAMS['temp_default_gradient']
    g_minmax = cfg.PARAMS['temp_local_gradient_bounds']
    sf = cfg.PARAMS['prcp_scaling_factor']

    for gdir in gdirs:
        # Grid point closest to the glacier centroid
        lon_idx = np.argmin(np.abs(lon - gdir.cenlon))
        lat_idx = np.argmin(np.abs(lat - gdir.cenlat))
        # Cached extraction of the point time series, then write it out
        iprcp, itemp, igrad, ihgt = utils.joblib_read_climate(
            fpath, lon_idx, lat_idx, def_grad, g_minmax, sf, use_grad)
        gdir.write_monthly_climate_file(time, iprcp, itemp, igrad, ihgt)

    nc.close()