import json
import logging
import os

import pandas as pd
import xarray as xr

from oggm import cfg

# NOTE: the helper routines used below (compute_scaling_params,
# sensitivity_run_vas, climate_run_vas, climate_run_fl, eq_runs) are assumed
# to be provided by the project's own modules and are not imported here.

log = logging.getLogger(__name__)


def _test():
    """Use newly found scaling parameters for HISTALP commitment run."""
    # start logger with OGGM settings
    cfg.set_logging_config()

    # get computed scaling parameters
    scaling_params_dict = json.load(open(
        '/home/users/moberrauch/run_output/scaling_params/scaling_params.json'
    ))
    # set scaling parameters
    scaling_params_list = [
        tuple(list(scaling_params_dict['const_expo'].values())[:4])]  # best fit
    # define file suffixes
    suffixes = ['_lin_reg']
    # get HISTALP RGI IDs
    rgi_ids = pd.read_csv('/home/users/moberrauch/data/histalp_rgi_ids.csv',
                          index_col=0)['RGIId'].values

    sensitivity_run_vas(rgi_ids=rgi_ids, scaling_params=scaling_params_list,
                        suffixes=suffixes, temp_bias=+0.5, nyears=100)


def hef_sensitivity_local():
    """The sensitivity experiments for Hintereisferner can be run locally,
    since they don't need much computational power. They can also be used as
    a "test case" if changes to the functions are made.
    """
    # start logger with OGGM settings
    cfg.set_logging_config()

    # define paths to local directories as environment variables
    WORKDIR = '/Users/oberrauch/work/master/working_directories/test_scaling_const/'
    OUTDIR = '/Users/oberrauch/work/master/data/hef_sensitivity/'
    os.environ['WORKDIR'] = WORKDIR
    os.environ['OUTDIR'] = OUTDIR

    # specify Hintereisferner RGI ID
    rgi_ids = ['RGI60-11.00897']

    # compute custom scaling parameters for Hintereisferner
    log.info('Compute scaling params')
    scaling_params = compute_scaling_params(rgi_ids, path=True)

    # ----------------------------
    # SCALING PARAMS SENSITIVITY
    # ----------------------------
    # use default time scales but custom scaling constants

    # set scaling parameters
    scaling_params_list = [(4.5507, 0.191, 2.2, 1.375)]  # Global
    # get custom scaling constants
    const_only = list(scaling_params['const_only'].values())
    # add default scaling exponents
    const_only.extend([2.2, 1.375])
    # add to scaling parameter list
    scaling_params_list.append(tuple(const_only))
    # define file suffixes and file path
    suffixes = ['_default', '_fixed_exp']
    fpath = os.path.join(OUTDIR, 'scaling_param_sensitivity.nc')
    log.info('Run scaling param sensitivity')
    sensitivity_run_vas(rgi_ids=rgi_ids, scaling_params=scaling_params_list,
                        suffixes=suffixes, temp_bias=+0.5, path=fpath)

    # ------------------------
    # TIME SCALE SENSITIVITY
    # ------------------------
    # use default scaling parameters and scale time scales by factors 0.5 and 2

    # define file suffixes and file path
    suffixes = ['_default', '_half', '_twice']
    fpath = os.path.join(OUTDIR, 'time_scale_sensitivity.nc')
    log.info('Run time scale sensitivity')
    sensitivity_run_vas(rgi_ids=rgi_ids, time_scale_factors=[1, 0.5, 2],
                        suffixes=suffixes, temp_bias=+0.5, path=fpath)
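

# The snippet below is a hypothetical sketch (not part of the original run
# scripts) illustrating the structure that the surrounding functions assume
# for the output of compute_scaling_params() and for the scaling_params.json
# files read further down: a 'const_only' entry holding the two fitted scaling
# constants and a 'const_expo' entry holding constants and exponents. The key
# names and numbers are placeholders, not fitted results.
_example_scaling_params = {
    'const_only': {'c_length': 4.55, 'c_area': 0.19},
    'const_expo': {'c_length': 4.55, 'c_area': 0.19, 'q': 2.2, 'gamma': 1.375},
}
# With this layout, the default exponents [2.2, 1.375] are appended to the
# 'const_only' constants, while the first four values of 'const_expo' already
# form a complete (constant, constant, exponent, exponent) tuple.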


def histalp_commitment_custom_scaling():
    """Use newly found scaling parameters for HISTALP commitment run."""
    # start logger with OGGM settings
    cfg.set_logging_config()

    # get computed scaling parameters
    scaling_params_dict = json.load(open(
        '/home/users/moberrauch/run_output/scaling_params/scaling_params.json'
    ))

    # set scaling parameters
    scaling_params_list = [(4.5507, 0.191, 2.2, 1.375)]  # Global
    # get custom scaling constants
    const_only = list(scaling_params_dict['const_only'].values())
    # add default scaling exponents
    const_only.extend([2.2, 1.375])
    # add to scaling parameter list
    scaling_params_list.append(tuple(const_only))  # consts only
    scaling_params_list.append(
        tuple(list(scaling_params_dict['const_expo'].values())[:4]))  # best fit

    # define file suffixes
    suffixes = ['_default', '_fixed_exp', '_lin_reg']
    # get HISTALP RGI IDs
    rgi_ids = pd.read_csv('/home/users/moberrauch/data/histalp_rgi_ids.csv',
                          index_col=0)['RGIId'].values

    sensitivity_run_vas(rgi_ids=rgi_ids, scaling_params=scaling_params_list,
                        suffixes=suffixes, temp_bias=+0.5)


def histalp_commitment_custom_scaling(path=True):
    """Use newly found scaling parameters for HISTALP commitment run."""
    # start logger with OGGM settings
    cfg.set_logging_config()

    # get computed scaling parameters
    # scaling_params_dict = json.load(open(
    #     '/home/users/moberrauch/run_output/scaling_params/scaling_params.json'))
    # scaling_params_dict = json.load(open(
    #     '/Users/oberrauch/work/master/data/scaling_params/scaling_params.json'))
    scaling_params_dict = json.load(open(
        '/Users/oberrauch/work/master/data/hef_sensitivity/scaling_params.json'
    ))

    # set scaling parameters
    scaling_params_list = [(4.5507, 0.191, 2.2, 1.375)]  # Global
    const_only = list(scaling_params_dict['const_only'].values())
    const_only.extend([2.2, 1.375])
    scaling_params_list.append(tuple(const_only))  # consts only
    # scaling_params_list.append(
    #     tuple(list(scaling_params_dict['const_expo'].values())[:4]))  # best fit

    # define file suffixes
    # suffixes = ['_default', '_fixed_exp', '_lin_reg']
    suffixes = ['_default', '_fixed_exp']
    # get HISTALP RGI IDs
    # rgi_ids = pd.read_csv('/home/users/moberrauch/data/histalp_rgi_ids.csv',
    #                       index_col=0)['RGIId'].values
    rgi_ids = ['RGI60-11.00897']

    sensitivity_run_vas(rgi_ids=rgi_ids, scaling_params=scaling_params_list,
                        suffixes=suffixes, temp_bias=+0.5, path=path,
                        use_bias_for_run=False, tstar=1927)


def histalp_scaling_params():
    """Compute scaling constants and exponents for the HISTALP domain."""
    # start logger with OGGM settings
    cfg.set_logging_config()

    # get HISTALP RGI IDs
    rgi_ids = pd.read_csv('/home/users/moberrauch/data/histalp_rgi_ids.csv',
                          index_col=0)['RGIId'].values

    compute_scaling_params(rgi_ids, path=True)
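

# Hypothetical helper (not part of the original scripts): the
# histalp_rgi_ids.csv file read above is assumed to be a plain CSV with an
# index column and an 'RGIId' column, matching the
# pd.read_csv(..., index_col=0)['RGIId'] access pattern. A file in that format
# could be created as follows; the listed ID is just an example.
def _write_example_rgi_id_file(path='histalp_rgi_ids.csv'):
    """Write a minimal RGI-ID file in the assumed format."""
    example_ids = ['RGI60-11.00897']
    pd.DataFrame({'RGIId': example_ids}).to_csv(path)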


def histalp_timescale_sensitivity():
    """Use newly found scaling parameters for HISTALP commitment run.

    The time scales are additionally scaled by factors of 0.5 and 2.
    """
    # start logger with OGGM settings
    cfg.set_logging_config()

    # define file suffixes
    suffixes = ['_default', '_half', '_twice']
    # get HISTALP RGI IDs
    rgi_ids = pd.read_csv('/home/users/moberrauch/data/histalp_rgi_ids.csv',
                          index_col=0)['RGIId'].values

    sensitivity_run_vas(rgi_ids=rgi_ids, time_scale_factors=[1, 0.5, 2],
                        suffixes=suffixes, temp_bias=+0.5)


def histalp_timescale_sensitivity(path=True):
    """Use newly found scaling parameters for HISTALP commitment run."""
    # start logger with OGGM settings
    cfg.set_logging_config()

    # define file suffixes
    suffixes = ['_half', '_default', '_twice']
    # get HISTALP RGI IDs
    # rgi_ids = pd.read_csv('/home/users/moberrauch/data/histalp_rgi_ids.csv',
    #                       index_col=0)['RGIId'].values
    rgi_ids = ['RGI60-11.00897']

    sensitivity_run_vas(rgi_ids=rgi_ids, time_scale_factors=[0.5, 1, 2],
                        suffixes=suffixes, temp_bias=+0.5, path=path,
                        use_bias_for_run=False, tstar=1927)
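

# Hypothetical usage sketch (not part of the original scripts): mb_runs()
# below concatenates the VAS and flowline results along a new 'model'
# coordinate, so the two models can be separated again when reading the
# combined file. The hard-coded path is the one used in mb_runs().
def _read_combined_climate_runs():
    """Open the combined dataset written by mb_runs() and split it by model."""
    ds = xr.open_dataset(
        '/Users/oberrauch/work/master/data/eq_runs/climate_rofental.nc')
    vas_ds = ds.sel(model='vas')
    fl_ds = ds.sel(model='fl')
    return vas_ds, fl_ds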


def mb_runs(rgi_ids, tstar=None):
    """Calls the `climate_run_...` routines for the given glaciers, combines
    the resulting datasets for the VAS and flowline model and stores the
    result to file.
    """
    fl_ds = climate_run_fl(rgi_ids, tstar=tstar, use_bias_for_run=True)
    vas_ds = climate_run_vas(rgi_ids, tstar=tstar, use_bias_for_run=True)

    # concat the datasets by evolution model
    ds = xr.concat([vas_ds, fl_ds], pd.Index(['vas', 'fl'], name='model'))
    ds.to_netcdf('/Users/oberrauch/work/master/data/eq_runs/climate_rofental.nc')


if __name__ == '__main__':
    """If the script gets called, equilibrium run results (and the
    corresponding climate) for the Hintereisferner are computed and stored to
    file.
    """
    # start logger with OGGM settings
    cfg.set_logging_config()
    # get RGI IDs
    rgi_ids = ['RGI60-11.00897']
    # mb_runs(rgi_ids)
    eq_runs(rgi_ids)