def initialize(file=None, logging_level='INFO', params=None, future=False):
    """Read the configuration file containing the run's parameters.

    This should be the first call, before using any of the other OGGM
    modules for most (all?) OGGM simulations.

    Parameters
    ----------
    file : str
        path to the configuration file (default: OGGM params.cfg)
    logging_level : str
        set a logging level. See :func:`set_logging_config` for options.
    params : dict
        overrides for specific parameters from the config file
    future : bool
        use the new behavior of logging='WORKFLOW'.
    """
    global PARAMS
    global DATA

    # Parse the config file and set up logging; heavier data loading
    # (below) is what distinguishes this from initialize_minimal().
    initialize_minimal(file=file, logging_level=logging_level, params=params,
                       future=future)

    # Do not spam the log while we populate PARAMS/DATA below
    PARAMS.do_log = False

    # Make sure we have a proper cache dir
    from oggm.utils import download_oggm_files
    download_oggm_files()

    # All static data files ship in the package's 'data' directory —
    # compute the path once instead of once per file.
    data_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                            'data')

    # Read in the demo glaciers
    DATA['demo_glaciers'] = pd.read_csv(
        os.path.join(data_dir, 'demo_glaciers.csv'), index_col=0)

    # Add the special DEM projection grids (only once per session)
    if 'dem_grids' not in DATA:
        grids = {}
        for grid_json in ['gimpdem_90m_v01.1.json',
                          'arcticdem_mosaic_100m_v3.0.json',
                          'Alaska_albers_V3.json',
                          'AntarcticDEM_wgs84.json',
                          'REMA_100m_dem.json']:
            fp = os.path.join(data_dir, grid_json)
            try:
                grids[grid_json] = salem.Grid.from_json(fp)
            except NameError:
                # NameError means `salem` is not importable here —
                # presumably an optional dependency; TODO confirm at
                # the module's import block.
                pass
        DATA['dem_grids'] = grids

    # Trigger a one time check of the hash file
    from oggm.utils import get_dl_verify_data
    get_dl_verify_data('dummy_section')

    # OK, from now on log normally again
    PARAMS.do_log = True
def test_github_no_internet(self):
    """Offline downloads must raise NoInternetException before any I/O."""
    self.reset_dir()

    def _never_called(dl_func, cache_path):
        # If the download machinery reaches this hook while offline,
        # the internet guard failed — fail the test immediately.
        assert False

    with FakeDownloadManager('_call_dl_func', _never_called):
        with self.assertRaises(utils.NoInternetException):
            previous = cfg.PARAMS['has_internet']
            cfg.PARAMS['has_internet'] = False
            try:
                utils.download_oggm_files()
            finally:
                # Restore the flag even if the expected exception fires.
                cfg.PARAMS['has_internet'] = previous
def initialize(file=None, logging_level='INFO'):
    """Read the configuration file containing the run's parameters.

    This should be the first call, before using any of the other OGGM
    modules for most (all?) OGGM simulations.

    Parameters
    ----------
    file : str
        path to the configuration file (default: OGGM params.cfg)
    logging_level : str
        set a logging level. See :func:`set_logging_config` for options.
    """
    # Module-level state populated by this call.
    global IS_INITIALIZED
    global PARAMS
    global PATHS
    global DATA

    set_logging_config(logging_level=logging_level)

    # Fall back to the params.cfg shipped with the package
    if file is None:
        file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                            'params.cfg')

    try:
        cp = ConfigObj(file, file_error=True)
    except (ConfigObjError, IOError) as e:
        # Without a parsable config the run cannot proceed — abort hard.
        log.critical('Config file could not be parsed (%s): %s', file, e)
        sys.exit()

    log.workflow('Using configuration file: %s', file)

    # Paths
    oggm_static_paths()
    PATHS['working_dir'] = cp['working_dir']
    PATHS['dem_file'] = cp['dem_file']
    PATHS['climate_file'] = cp['climate_file']

    # Multiprocessing pool
    PARAMS['use_multiprocessing'] = cp.as_bool('use_multiprocessing')
    PARAMS['mp_processes'] = cp.as_int('mp_processes')

    # Some non-trivial params (explicitly typed; anything not listed in
    # `ltr` below is assumed to be a float)
    PARAMS['continue_on_error'] = cp.as_bool('continue_on_error')
    PARAMS['grid_dx_method'] = cp['grid_dx_method']
    PARAMS['topo_interp'] = cp['topo_interp']
    PARAMS['use_intersects'] = cp.as_bool('use_intersects')
    PARAMS['use_compression'] = cp.as_bool('use_compression')
    PARAMS['mpi_recv_buf_size'] = cp.as_int('mpi_recv_buf_size')
    PARAMS['use_multiple_flowlines'] = cp.as_bool('use_multiple_flowlines')
    PARAMS['filter_min_slope'] = cp.as_bool('filter_min_slope')
    PARAMS['auto_skip_task'] = cp.as_bool('auto_skip_task')
    PARAMS['correct_for_neg_flux'] = cp.as_bool('correct_for_neg_flux')
    PARAMS['filter_for_neg_flux'] = cp.as_bool('filter_for_neg_flux')
    PARAMS['run_mb_calibration'] = cp.as_bool('run_mb_calibration')
    PARAMS['rgi_version'] = cp['rgi_version']
    PARAMS['use_rgi_area'] = cp.as_bool('use_rgi_area')
    PARAMS['compress_climate_netcdf'] = cp.as_bool('compress_climate_netcdf')
    PARAMS['use_tar_shapefiles'] = cp.as_bool('use_tar_shapefiles')
    PARAMS['clip_mu_star'] = cp.as_bool('clip_mu_star')
    PARAMS['clip_tidewater_border'] = cp.as_bool('clip_tidewater_border')
    PARAMS['dl_verify'] = cp.as_bool('dl_verify')

    # Climate
    # baseline_climate is normalized to upper case for comparisons elsewhere
    PARAMS['baseline_climate'] = cp['baseline_climate'].strip().upper()
    PARAMS['baseline_y0'] = cp.as_int('baseline_y0')
    PARAMS['baseline_y1'] = cp.as_int('baseline_y1')
    PARAMS['hydro_month_nh'] = cp.as_int('hydro_month_nh')
    PARAMS['hydro_month_sh'] = cp.as_int('hydro_month_sh')
    PARAMS['temp_use_local_gradient'] = cp.as_bool('temp_use_local_gradient')
    PARAMS['tstar_search_glacierwide'] = cp.as_bool('tstar_search_glacierwide')
    k = 'temp_local_gradient_bounds'
    PARAMS[k] = [float(vk) for vk in cp.as_list(k)]
    k = 'tstar_search_window'
    PARAMS[k] = [int(vk) for vk in cp.as_list(k)]
    PARAMS['use_bias_for_run'] = cp.as_bool('use_bias_for_run')

    # Inversion (kept as raw string — can be a factor name, not a bool)
    k = 'use_shape_factor_for_inversion'
    PARAMS[k] = cp[k]

    # Flowline model
    k = 'use_shape_factor_for_fluxbasedmodel'
    PARAMS[k] = cp[k]

    # Make sure we have a proper cache dir
    from oggm.utils import download_oggm_files, get_demo_file
    download_oggm_files()

    # Delete non-floats: every key handled explicitly above must be removed
    # before the catch-all float loop below.
    ltr = ['working_dir', 'dem_file', 'climate_file', 'use_tar_shapefiles',
           'grid_dx_method', 'run_mb_calibration', 'compress_climate_netcdf',
           'mp_processes', 'use_multiprocessing', 'baseline_y0',
           'baseline_y1', 'temp_use_local_gradient',
           'temp_local_gradient_bounds', 'topo_interp', 'use_compression',
           'bed_shape', 'continue_on_error', 'use_multiple_flowlines',
           'tstar_search_glacierwide', 'mpi_recv_buf_size', 'hydro_month_nh',
           'clip_mu_star', 'tstar_search_window', 'use_bias_for_run',
           'hydro_month_sh', 'use_intersects', 'filter_min_slope',
           'clip_tidewater_border', 'auto_skip_task', 'correct_for_neg_flux',
           'filter_for_neg_flux', 'rgi_version', 'dl_verify',
           'use_shape_factor_for_inversion', 'use_rgi_area',
           'use_shape_factor_for_fluxbasedmodel', 'baseline_climate']
    for k in ltr:
        cp.pop(k, None)

    # Other params are floats
    for k in cp:
        PARAMS[k] = cp.as_float(k)

    # Read-in the reference t* data for all available models types (oggm, vas)
    model_prefixes = ['oggm_', 'vas_']
    for prefix in model_prefixes:
        fns = ['ref_tstars_rgi5_cru4', 'ref_tstars_rgi6_cru4',
               'ref_tstars_rgi5_histalp', 'ref_tstars_rgi6_histalp']
        for fn in fns:
            fpath = get_demo_file(prefix + fn + '.csv')
            PARAMS[prefix + fn] = pd.read_csv(fpath)
            fpath = get_demo_file(prefix + fn + '_calib_params.json')
            with open(fpath, 'r') as fp:
                mbpar = json.load(fp)
            PARAMS[prefix + fn + '_calib_params'] = mbpar

    # Empty defaults
    set_intersects_db()
    IS_INITIALIZED = True

    # Pre extract cru cl to avoid problems by multiproc
    from oggm.utils import get_cru_cl_file
    get_cru_cl_file()

    # Read in the demo glaciers
    file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                        'data', 'demo_glaciers.csv')
    DATA['demo_glaciers'] = pd.read_csv(file, index_col=0)

    # Add the special DEM projection grids (only once per session)
    if 'dem_grids' not in DATA:
        grids = {}
        for grid_json in ['gimpdem_90m_v01.1.json',
                          'arcticdem_mosaic_100m_v3.0.json',
                          'AntarcticDEM_wgs84.json', 'REMA_100m_dem.json']:
            if grid_json not in grids:
                fp = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                  'data', grid_json)
                try:
                    grids[grid_json] = salem.Grid.from_json(fp)
                except NameError:
                    # NameError means `salem` is not importable —
                    # presumably an optional dependency; TODO confirm.
                    pass
        DATA['dem_grids'] = grids
def initialize(file=None):
    """Read the configuration file containing the run's parameters.

    Populates the module-level ``PATHS`` and ``PARAMS`` containers from
    the given config file (default: the packaged ``params.cfg``).
    """
    # Module-level state populated by this call.
    global IS_INITIALIZED
    global PARAMS
    global PATHS

    # Fall back to the params.cfg shipped with the package
    if file is None:
        file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                            'params.cfg')

    log.info('Parameter file: %s', file)

    try:
        cp = ConfigObj(file, file_error=True)
    except (ConfigObjError, IOError) as e:
        # Without a parsable config the run cannot proceed — abort hard.
        log.critical('Param file could not be parsed (%s): %s', file, e)
        sys.exit()

    # Paths
    oggm_static_paths()
    PATHS['working_dir'] = cp['working_dir']
    PATHS['dem_file'] = cp['dem_file']
    PATHS['climate_file'] = cp['climate_file']

    # Multiprocessing pool
    PARAMS['use_multiprocessing'] = cp.as_bool('use_multiprocessing')
    PARAMS['mp_processes'] = cp.as_int('mp_processes')

    # Some non-trivial params (explicitly typed; anything not listed in
    # `ltr` below is assumed to be a float)
    PARAMS['continue_on_error'] = cp.as_bool('continue_on_error')
    PARAMS['grid_dx_method'] = cp['grid_dx_method']
    PARAMS['topo_interp'] = cp['topo_interp']
    PARAMS['use_intersects'] = cp.as_bool('use_intersects')
    PARAMS['use_compression'] = cp.as_bool('use_compression')
    PARAMS['mpi_recv_buf_size'] = cp.as_int('mpi_recv_buf_size')
    PARAMS['use_multiple_flowlines'] = cp.as_bool('use_multiple_flowlines')
    PARAMS['optimize_thick'] = cp.as_bool('optimize_thick')
    PARAMS['filter_min_slope'] = cp.as_bool('filter_min_slope')
    PARAMS['auto_skip_task'] = cp.as_bool('auto_skip_task')
    PARAMS['correct_for_neg_flux'] = cp.as_bool('correct_for_neg_flux')
    PARAMS['filter_for_neg_flux'] = cp.as_bool('filter_for_neg_flux')
    PARAMS['run_mb_calibration'] = cp.as_bool('run_mb_calibration')
    PARAMS['rgi_version'] = cp['rgi_version']
    PARAMS['hydro_month_nh'] = cp.as_int('hydro_month_nh')
    PARAMS['hydro_month_sh'] = cp.as_int('hydro_month_sh')
    PARAMS['use_rgi_area'] = cp.as_bool('use_rgi_area')

    # Climate
    PARAMS['temp_use_local_gradient'] = cp.as_bool('temp_use_local_gradient')
    k = 'temp_local_gradient_bounds'
    PARAMS[k] = [float(vk) for vk in cp.as_list(k)]
    k = 'tstar_search_window'
    PARAMS[k] = [int(vk) for vk in cp.as_list(k)]
    PARAMS['use_bias_for_run'] = cp.as_bool('use_bias_for_run')
    # prcp_scaling_factor may be a keyword ('stddev'/'stddev_perglacier')
    # or a plain float — only convert when it is not a keyword.
    _factor = cp['prcp_scaling_factor']
    if _factor not in ['stddev', 'stddev_perglacier']:
        _factor = cp.as_float('prcp_scaling_factor')
    PARAMS['prcp_scaling_factor'] = _factor
    PARAMS['allow_negative_mustar'] = cp.as_bool('allow_negative_mustar')

    # Inversion
    PARAMS['invert_with_sliding'] = cp.as_bool('invert_with_sliding')
    _k = 'optimize_inversion_params'
    PARAMS[_k] = cp.as_bool(_k)
    # Kept as raw string — can be a factor name, not a bool
    PARAMS['use_shape_factor_for_inversion'] = \
        cp['use_shape_factor_for_inversion']

    # Flowline model
    _k = 'use_optimized_inversion_params'
    PARAMS[_k] = cp.as_bool(_k)
    PARAMS['use_shape_factor_for_fluxbasedmodel'] = \
        cp['use_shape_factor_for_fluxbasedmodel']

    # Make sure we have a proper cache dir
    # NOTE(review): SAMPLE_DATA_COMMIT is imported but never used in this
    # function — candidate for removal.
    from oggm.utils import download_oggm_files, SAMPLE_DATA_COMMIT
    download_oggm_files()

    # Delete non-floats: every key handled explicitly above must be removed
    # before the catch-all float loop below.
    ltr = ['working_dir', 'dem_file', 'climate_file', 'grid_dx_method',
           'run_mb_calibration', 'mp_processes', 'use_multiprocessing',
           'temp_use_local_gradient', 'temp_local_gradient_bounds',
           'topo_interp', 'use_compression', 'bed_shape',
           'continue_on_error', 'use_optimized_inversion_params',
           'invert_with_sliding', 'optimize_inversion_params',
           'use_multiple_flowlines', 'optimize_thick', 'mpi_recv_buf_size',
           'hydro_month_nh', 'tstar_search_window', 'use_bias_for_run',
           'hydro_month_sh', 'prcp_scaling_factor', 'use_intersects',
           'filter_min_slope', 'auto_skip_task', 'correct_for_neg_flux',
           'filter_for_neg_flux', 'rgi_version', 'allow_negative_mustar',
           'use_shape_factor_for_inversion', 'use_rgi_area',
           'use_shape_factor_for_fluxbasedmodel']
    for k in ltr:
        cp.pop(k, None)

    # Other params are floats
    for k in cp:
        PARAMS[k] = cp.as_float(k)

    # Empty defaults
    set_intersects_db()
    IS_INITIALIZED = True

    # Pre extract cru cl to avoid problems by multiproc
    from oggm.utils import get_cru_cl_file
    get_cru_cl_file()
def initialize(file=None, logging_level='INFO'):
    """Read the configuration file containing the run's parameters.

    This should be the first call, before using any of the other OGGM
    modules for most (all?) OGGM simulations.

    Parameters
    ----------
    file : str
        path to the configuration file (default: OGGM params.cfg)
    logging_level : str
        set a logging level. See :func:`set_logging_config` for options.
    """
    # Module-level state populated by this call.
    global PARAMS
    global DATA

    # Parse the config file and set up logging; the heavier data loading
    # below is what distinguishes this from initialize_minimal().
    initialize_minimal(file=file, logging_level=logging_level)

    # Do not spam the log while we populate PARAMS/DATA below
    PARAMS.do_log = False

    # Make sure we have a proper cache dir
    from oggm.utils import download_oggm_files, get_demo_file
    download_oggm_files()

    # Read-in the reference t* data for all available models types (oggm, vas)
    model_prefixes = ['oggm_', 'vas_']
    for prefix in model_prefixes:
        fns = ['ref_tstars_rgi5_cru4', 'ref_tstars_rgi6_cru4',
               'ref_tstars_rgi5_histalp', 'ref_tstars_rgi6_histalp']
        for fn in fns:
            fpath = get_demo_file(prefix + fn + '.csv')
            PARAMS[prefix + fn] = pd.read_csv(fpath)
            fpath = get_demo_file(prefix + fn + '_calib_params.json')
            with open(fpath, 'r') as fp:
                mbpar = json.load(fp)
            PARAMS[prefix + fn + '_calib_params'] = mbpar

    # Read in the demo glaciers
    file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                        'data', 'demo_glaciers.csv')
    DATA['demo_glaciers'] = pd.read_csv(file, index_col=0)

    # Add the special DEM projection grids (only once per session)
    if 'dem_grids' not in DATA:
        grids = {}
        for grid_json in ['gimpdem_90m_v01.1.json',
                          'arcticdem_mosaic_100m_v3.0.json',
                          'Alaska_albers_V3.json',
                          'AntarcticDEM_wgs84.json',
                          'REMA_100m_dem.json']:
            # NOTE(review): this membership test is always True — `grids`
            # starts empty and the file names are unique.
            if grid_json not in grids:
                fp = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                  'data', grid_json)
                try:
                    grids[grid_json] = salem.Grid.from_json(fp)
                except NameError:
                    # NameError means `salem` is not importable —
                    # presumably an optional dependency; TODO confirm.
                    pass
        DATA['dem_grids'] = grids

    # Trigger a one time check of the hash file
    from oggm.utils import get_dl_verify_data
    get_dl_verify_data('dummy_section')

    # OK, from now on log normally again
    PARAMS.do_log = True
def initialize(file=None):
    """Read the configuration file containing the run's parameters.

    Populates the module-level ``PATHS``/``PARAMS`` containers and the
    RGI region name tables from the given config file (default: the
    packaged ``params.cfg``).
    """
    # Module-level state populated by this call.
    # NOTE(review): N, A and RHO are declared global but never assigned
    # in this function — the declarations look vestigial.
    global IS_INITIALIZED
    global PARAMS
    global PATHS
    global CONTINUE_ON_ERROR
    global N
    global A
    global RHO
    global RGI_REG_NAMES
    global RGI_SUBREG_NAMES

    # Fall back to the params.cfg shipped with the package
    if file is None:
        file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                            'params.cfg')

    log.info('Parameter file: %s', file)

    try:
        cp = ConfigObj(file, file_error=True)
    except (ConfigObjError, IOError) as e:
        # Without a parsable config the run cannot proceed — abort hard.
        log.critical('Param file could not be parsed (%s): %s', file, e)
        sys.exit()

    CONTINUE_ON_ERROR = cp.as_bool('continue_on_error')

    # Default working dir when none is configured
    PATHS['working_dir'] = cp['working_dir']
    if not PATHS['working_dir']:
        PATHS['working_dir'] = os.path.join(os.path.expanduser('~'),
                                            'OGGM_WORKING_DIRECTORY')

    # Paths
    oggm_static_paths()
    PATHS['dem_file'] = cp['dem_file']
    PATHS['climate_file'] = cp['climate_file']
    PATHS['wgms_rgi_links'] = cp['wgms_rgi_links']
    PATHS['glathida_rgi_links'] = cp['glathida_rgi_links']
    PATHS['leclercq_rgi_links'] = cp['leclercq_rgi_links']

    # run params
    PARAMS['run_period'] = [int(vk) for vk in cp.as_list('run_period')]

    # Multiprocessing pool
    PARAMS['use_multiprocessing'] = cp.as_bool('use_multiprocessing')
    PARAMS['mp_processes'] = cp.as_int('mp_processes')

    # Some non-trivial params (explicitly typed; anything not listed in
    # `ltr` below is assumed to be a float)
    PARAMS['grid_dx_method'] = cp['grid_dx_method']
    PARAMS['topo_interp'] = cp['topo_interp']
    PARAMS['use_divides'] = cp.as_bool('use_divides')
    PARAMS['use_intersects'] = cp.as_bool('use_intersects')
    PARAMS['use_compression'] = cp.as_bool('use_compression')
    PARAMS['mpi_recv_buf_size'] = cp.as_int('mpi_recv_buf_size')
    PARAMS['use_multiple_flowlines'] = cp.as_bool('use_multiple_flowlines')
    PARAMS['optimize_thick'] = cp.as_bool('optimize_thick')
    PARAMS['filter_min_slope'] = cp.as_bool('filter_min_slope')

    # Climate
    PARAMS['temp_use_local_gradient'] = cp.as_bool('temp_use_local_gradient')
    k = 'temp_local_gradient_bounds'
    PARAMS[k] = [float(vk) for vk in cp.as_list(k)]
    k = 'tstar_search_window'
    PARAMS[k] = [int(vk) for vk in cp.as_list(k)]
    PARAMS['use_bias_for_run'] = cp.as_bool('use_bias_for_run')
    # prcp_scaling_factor may be a keyword ('stddev'/'stddev_perglacier')
    # or a plain float — only convert when it is not a keyword.
    _factor = cp['prcp_scaling_factor']
    if _factor not in ['stddev', 'stddev_perglacier']:
        _factor = cp.as_float('prcp_scaling_factor')
    PARAMS['prcp_scaling_factor'] = _factor

    # Inversion
    PARAMS['invert_with_sliding'] = cp.as_bool('invert_with_sliding')
    _k = 'optimize_inversion_params'
    PARAMS[_k] = cp.as_bool(_k)

    # Flowline model
    PARAMS['bed_shape'] = cp['bed_shape']
    _k = 'use_optimized_inversion_params'
    PARAMS[_k] = cp.as_bool(_k)

    # Make sure we have a proper cache dir
    from oggm.utils import download_oggm_files
    download_oggm_files()

    # Parse RGI metadata (shipped with the downloaded sample data)
    _d = os.path.join(CACHE_DIR, 'oggm-sample-data-master', 'rgi_meta')
    RGI_REG_NAMES = pd.read_csv(os.path.join(_d, 'rgi_regions.csv'),
                                index_col=0)
    RGI_SUBREG_NAMES = pd.read_csv(os.path.join(_d, 'rgi_subregions.csv'),
                                   index_col=0)

    # Delete non-floats: every key handled explicitly above must be removed
    # before the catch-all float loop below.
    ltr = ['working_dir', 'dem_file', 'climate_file', 'wgms_rgi_links',
           'glathida_rgi_links', 'grid_dx_method', 'mp_processes',
           'use_multiprocessing', 'use_divides', 'temp_use_local_gradient',
           'temp_local_gradient_bounds', 'topo_interp', 'use_compression',
           'bed_shape', 'continue_on_error',
           'use_optimized_inversion_params', 'invert_with_sliding',
           'optimize_inversion_params', 'use_multiple_flowlines',
           'leclercq_rgi_links', 'optimize_thick', 'mpi_recv_buf_size',
           'tstar_search_window', 'use_bias_for_run', 'run_period',
           'prcp_scaling_factor', 'use_intersects', 'filter_min_slope']
    for k in ltr:
        cp.pop(k, None)

    # Other params are floats
    for k in cp:
        PARAMS[k] = cp.as_float(k)

    # Empty defaults
    from oggm.utils import get_demo_file
    set_divides_db(get_demo_file('divides_alps.shp'))
    set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))

    IS_INITIALIZED = True
""" OGGM package. Copyright: OGGM developers, 2014-2018 License: GPLv3+ """ # flake8: noqa from ._version import get_versions __version__ = get_versions()['version'] del get_versions try: from oggm.mpi import _init_oggm_mpi _init_oggm_mpi() except ImportError: pass # API # TODO: why are some funcs here? maybe reconsider what API actually is from oggm.utils import entity_task, global_task, GlacierDirectory from oggm.core.centerlines import Centerline from oggm.core.flowline import Flowline # Make sure we have the sample data at import from oggm.utils import download_oggm_files download_oggm_files()
def initialize(file=None):
    """Read the configuration file containing the run's parameters.

    Populates the module-level ``PATHS``/``PARAMS`` containers and the
    RGI region name tables from the given config file (default: the
    packaged ``params.cfg``).
    """
    # Module-level state populated by this call.
    # NOTE(review): N, A and RHO are declared global but never assigned
    # in this function — the declarations look vestigial.
    global IS_INITIALIZED
    global PARAMS
    global PATHS
    global CONTINUE_ON_ERROR
    global N
    global A
    global RHO
    global RGI_REG_NAMES
    global RGI_SUBREG_NAMES

    # Fall back to the params.cfg shipped with the package
    if file is None:
        file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                            'params.cfg')

    log.info('Parameter file: %s', file)

    try:
        cp = ConfigObj(file, file_error=True)
    except (ConfigObjError, IOError) as e:
        # Without a parsable config the run cannot proceed — abort hard.
        log.critical('Param file could not be parsed (%s): %s', file, e)
        sys.exit()

    CONTINUE_ON_ERROR = cp.as_bool('continue_on_error')

    # Default working dir when none is configured
    PATHS['working_dir'] = cp['working_dir']
    if not PATHS['working_dir']:
        PATHS['working_dir'] = os.path.join(os.path.expanduser('~'),
                                            'OGGM_WORKING_DIRECTORY')

    # Paths
    oggm_static_paths()
    PATHS['dem_file'] = cp['dem_file']
    PATHS['climate_file'] = cp['climate_file']
    PATHS['wgms_rgi_links'] = cp['wgms_rgi_links']
    PATHS['glathida_rgi_links'] = cp['glathida_rgi_links']
    PATHS['leclercq_rgi_links'] = cp['leclercq_rgi_links']

    # run params
    PARAMS['run_period'] = [int(vk) for vk in cp.as_list('run_period')]

    # Multiprocessing pool
    PARAMS['use_multiprocessing'] = cp.as_bool('use_multiprocessing')
    PARAMS['mp_processes'] = cp.as_int('mp_processes')

    # Some non-trivial params (explicitly typed; anything not listed in
    # `ltr` below is assumed to be a float)
    PARAMS['grid_dx_method'] = cp['grid_dx_method']
    PARAMS['topo_interp'] = cp['topo_interp']
    PARAMS['use_divides'] = cp.as_bool('use_divides')
    PARAMS['use_intersects'] = cp.as_bool('use_intersects')
    PARAMS['use_compression'] = cp.as_bool('use_compression')
    PARAMS['mpi_recv_buf_size'] = cp.as_int('mpi_recv_buf_size')
    PARAMS['use_multiple_flowlines'] = cp.as_bool('use_multiple_flowlines')
    PARAMS['optimize_thick'] = cp.as_bool('optimize_thick')
    PARAMS['filter_min_slope'] = cp.as_bool('filter_min_slope')
    PARAMS['auto_skip_task'] = cp.as_bool('auto_skip_task')

    # Climate
    PARAMS['temp_use_local_gradient'] = cp.as_bool('temp_use_local_gradient')
    k = 'temp_local_gradient_bounds'
    PARAMS[k] = [float(vk) for vk in cp.as_list(k)]
    k = 'tstar_search_window'
    PARAMS[k] = [int(vk) for vk in cp.as_list(k)]
    PARAMS['use_bias_for_run'] = cp.as_bool('use_bias_for_run')
    # prcp_scaling_factor may be a keyword ('stddev'/'stddev_perglacier')
    # or a plain float — only convert when it is not a keyword.
    _factor = cp['prcp_scaling_factor']
    if _factor not in ['stddev', 'stddev_perglacier']:
        _factor = cp.as_float('prcp_scaling_factor')
    PARAMS['prcp_scaling_factor'] = _factor

    # Inversion
    PARAMS['invert_with_sliding'] = cp.as_bool('invert_with_sliding')
    _k = 'optimize_inversion_params'
    PARAMS[_k] = cp.as_bool(_k)

    # Flowline model
    _k = 'use_optimized_inversion_params'
    PARAMS[_k] = cp.as_bool(_k)

    # Make sure we have a proper cache dir
    from oggm.utils import download_oggm_files
    download_oggm_files()

    # Parse RGI metadata (shipped with the downloaded sample data)
    _d = os.path.join(CACHE_DIR, 'oggm-sample-data-master', 'rgi_meta')
    RGI_REG_NAMES = pd.read_csv(os.path.join(_d, 'rgi_regions.csv'),
                                index_col=0)
    RGI_SUBREG_NAMES = pd.read_csv(os.path.join(_d, 'rgi_subregions.csv'),
                                   index_col=0)

    # Delete non-floats: every key handled explicitly above must be removed
    # before the catch-all float loop below.
    ltr = ['working_dir', 'dem_file', 'climate_file', 'wgms_rgi_links',
           'glathida_rgi_links', 'grid_dx_method', 'mp_processes',
           'use_multiprocessing', 'use_divides', 'temp_use_local_gradient',
           'temp_local_gradient_bounds', 'topo_interp', 'use_compression',
           'bed_shape', 'continue_on_error',
           'use_optimized_inversion_params', 'invert_with_sliding',
           'optimize_inversion_params', 'use_multiple_flowlines',
           'leclercq_rgi_links', 'optimize_thick', 'mpi_recv_buf_size',
           'tstar_search_window', 'use_bias_for_run', 'run_period',
           'prcp_scaling_factor', 'use_intersects', 'filter_min_slope',
           'auto_skip_task']
    for k in ltr:
        cp.pop(k, None)

    # Other params are floats
    for k in cp:
        PARAMS[k] = cp.as_float(k)

    # Empty defaults
    from oggm.utils import get_demo_file
    set_divides_db(get_demo_file('divides_alps.shp'))
    set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))

    IS_INITIALIZED = True
def initialize(file=None):
    """Read the configuration file containing the run's parameters.

    Populates the module-level ``PATHS``/``PARAMS`` containers and the
    reference t* tables from the given config file (default: the
    packaged ``params.cfg``).
    """
    # Module-level state populated by this call.
    global IS_INITIALIZED
    global PARAMS
    global PATHS

    # Fall back to the params.cfg shipped with the package
    if file is None:
        file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                            'params.cfg')

    log.info('Parameter file: %s', file)

    try:
        cp = ConfigObj(file, file_error=True)
    except (ConfigObjError, IOError) as e:
        # Without a parsable config the run cannot proceed — abort hard.
        log.critical('Param file could not be parsed (%s): %s', file, e)
        sys.exit()

    # Paths
    oggm_static_paths()
    PATHS['working_dir'] = cp['working_dir']
    PATHS['dem_file'] = cp['dem_file']
    PATHS['climate_file'] = cp['climate_file']

    # Multiprocessing pool
    PARAMS['use_multiprocessing'] = cp.as_bool('use_multiprocessing')
    PARAMS['mp_processes'] = cp.as_int('mp_processes')

    # Some non-trivial params (explicitly typed; anything not listed in
    # `ltr` below is assumed to be a float)
    PARAMS['continue_on_error'] = cp.as_bool('continue_on_error')
    PARAMS['grid_dx_method'] = cp['grid_dx_method']
    PARAMS['topo_interp'] = cp['topo_interp']
    PARAMS['use_intersects'] = cp.as_bool('use_intersects')
    PARAMS['use_compression'] = cp.as_bool('use_compression')
    PARAMS['mpi_recv_buf_size'] = cp.as_int('mpi_recv_buf_size')
    PARAMS['use_multiple_flowlines'] = cp.as_bool('use_multiple_flowlines')
    PARAMS['filter_min_slope'] = cp.as_bool('filter_min_slope')
    PARAMS['auto_skip_task'] = cp.as_bool('auto_skip_task')
    PARAMS['correct_for_neg_flux'] = cp.as_bool('correct_for_neg_flux')
    PARAMS['filter_for_neg_flux'] = cp.as_bool('filter_for_neg_flux')
    PARAMS['run_mb_calibration'] = cp.as_bool('run_mb_calibration')
    PARAMS['rgi_version'] = cp['rgi_version']
    PARAMS['use_rgi_area'] = cp.as_bool('use_rgi_area')
    PARAMS['compress_climate_netcdf'] = cp.as_bool('compress_climate_netcdf')

    # Climate
    # baseline_climate is normalized to upper case for comparisons elsewhere
    PARAMS['baseline_climate'] = cp['baseline_climate'].strip().upper()
    PARAMS['baseline_y0'] = cp.as_int('baseline_y0')
    PARAMS['baseline_y1'] = cp.as_int('baseline_y1')
    PARAMS['hydro_month_nh'] = cp.as_int('hydro_month_nh')
    PARAMS['hydro_month_sh'] = cp.as_int('hydro_month_sh')
    PARAMS['temp_use_local_gradient'] = cp.as_bool('temp_use_local_gradient')
    k = 'temp_local_gradient_bounds'
    PARAMS[k] = [float(vk) for vk in cp.as_list(k)]
    k = 'tstar_search_window'
    PARAMS[k] = [int(vk) for vk in cp.as_list(k)]
    PARAMS['use_bias_for_run'] = cp.as_bool('use_bias_for_run')
    PARAMS['allow_negative_mustar'] = cp.as_bool('allow_negative_mustar')

    # Inversion (kept as raw string — can be a factor name, not a bool)
    k = 'use_shape_factor_for_inversion'
    PARAMS[k] = cp[k]

    # Flowline model
    k = 'use_shape_factor_for_fluxbasedmodel'
    PARAMS[k] = cp[k]

    # Make sure we have a proper cache dir
    from oggm.utils import download_oggm_files, get_demo_file
    download_oggm_files()

    # Delete non-floats: every key handled explicitly above must be removed
    # before the catch-all float loop below.
    ltr = ['working_dir', 'dem_file', 'climate_file', 'grid_dx_method',
           'run_mb_calibration', 'compress_climate_netcdf', 'mp_processes',
           'use_multiprocessing', 'baseline_y0', 'baseline_y1',
           'temp_use_local_gradient', 'temp_local_gradient_bounds',
           'topo_interp', 'use_compression', 'bed_shape',
           'continue_on_error', 'use_multiple_flowlines',
           'mpi_recv_buf_size', 'hydro_month_nh', 'tstar_search_window',
           'use_bias_for_run', 'hydro_month_sh', 'use_intersects',
           'filter_min_slope', 'auto_skip_task', 'correct_for_neg_flux',
           'filter_for_neg_flux', 'rgi_version', 'allow_negative_mustar',
           'use_shape_factor_for_inversion', 'use_rgi_area',
           'use_shape_factor_for_fluxbasedmodel', 'baseline_climate']
    for k in ltr:
        cp.pop(k, None)

    # Other params are floats
    for k in cp:
        PARAMS[k] = cp.as_float(k)

    # Read-in the reference t* data - maybe it will be used, maybe not
    fns = ['ref_tstars_rgi5_cru4', 'ref_tstars_rgi6_cru4',
           'ref_tstars_rgi5_histalp', 'ref_tstars_rgi6_histalp']
    for fn in fns:
        PARAMS[fn] = pd.read_csv(get_demo_file('oggm_' + fn + '.csv'))
        fpath = get_demo_file('oggm_' + fn + '_calib_params.json')
        with open(fpath, 'r') as fp:
            mbpar = json.load(fp)
        PARAMS[fn+'_calib_params'] = mbpar

    # Empty defaults
    set_intersects_db()
    IS_INITIALIZED = True

    # Pre extract cru cl to avoid problems by multiproc
    from oggm.utils import get_cru_cl_file
    get_cru_cl_file()
def initialize(file=None):
    """Read the configuration file containing the run's parameters.

    This is a wrapper around OGGM's own initialisation: it first runs
    ``oggminitialize()`` and then layers the CRAMPON-specific keys and
    paths on top of ``oggm.cfg``.
    """
    # Module-level state populated by this call. The physical-constant
    # globals (GRAVITY_CONST, E_FIRN, ...) are not assigned here —
    # presumably set elsewhere in the module; TODO confirm.
    global IS_INITIALIZED
    global BASENAMES
    global PARAMS
    global PATHS
    global NAMES
    global CONTINUE_ON_ERROR
    global GRAVITY_CONST
    global E_FIRN
    global ZERO_DEG_KELVIN
    global R
    global LATENT_HEAT_FUSION_WATER
    global HEAT_CAP_ICE
    global N
    global A
    global RHO
    global RHO_W
    global RGI_REG_NAMES
    global RGI_SUBREG_NAMES

    # This is necessary as OGGM still refers to its own initialisation
    oggminitialize()
    import oggm.cfg as oggmcfg

    # Add the CRAMPON-specific keys to the dicts
    oggmcfg.BASENAMES.update(CBASENAMES)

    # Fall back to the params.cfg shipped with the package
    if file is None:
        file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                            'params.cfg')

    log.info('Parameter file: %s', file)

    try:
        cp = ConfigObj(file, file_error=True)
    except (ConfigObjError, IOError) as e:
        # Without a parsable config the run cannot proceed — abort hard.
        log.critical('Config file could not be parsed (%s): %s', file, e)
        sys.exit()

    # Default working dir when none is configured
    oggmcfg.PATHS['working_dir'] = cp['working_dir']
    if not oggmcfg.PATHS['working_dir']:
        oggmcfg.PATHS['working_dir'] = os.path.join(os.path.expanduser('~'),
                                                    'OGGM_WORKING_DIRECTORY')

    # Paths
    oggm_static_paths()
    oggmcfg.PATHS['dem_dir'] = cp['dem_dir']

    # run params
    oggmcfg.PARAMS['run_period'] = [int(vk) for vk in
                                    cp.as_list('run_period')]
    CPARAMS['date'] = [int(vk) for vk in cp.as_list('date')]
    CPARAMS['cloudcover'] = [int(vk) for vk in cp.as_list('cloudcover')]

    # Multiprocessing pool
    oggmcfg.PARAMS['use_multiprocessing'] = cp.as_bool('use_multiprocessing')
    oggmcfg.PARAMS['mp_processes'] = cp.as_int('mp_processes')

    # Some non-trivial params (explicitly typed; anything not listed in
    # `ltr` below is assumed to be a float)
    oggmcfg.PARAMS['grid_dx_method'] = cp['grid_dx_method']
    oggmcfg.PARAMS['topo_interp'] = cp['topo_interp']
    oggmcfg.PARAMS['auto_skip_task'] = cp.as_bool('auto_skip_task')

    # Make sure we have a proper cache dir
    from oggm.utils import download_oggm_files
    download_oggm_files()

    # Delete non-floats: every key handled explicitly above must be removed
    # before the catch-all float loop below.
    ltr = ['working_dir', 'dem_file', 'dem_dir', 'grid_dx_method',
           'mp_processes', 'use_multiprocessing', 'hfile', 'topo_interp',
           'date', 'continue_on_error', 'cloudcover', 'run_period',
           'auto_skip_task']
    for k in ltr:
        cp.pop(k, None)

    # Other params are floats
    for k in cp:
        oggmcfg.PARAMS[k] = cp.as_float(k)

    # Empty defaults
    from oggm.utils import get_demo_file
    set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    IS_INITIALIZED = True

    # Update the dicts in case there are changes, then alias this module's
    # containers to OGGM's so both packages share the same state.
    oggmcfg.PATHS.update(CPATHS)
    oggmcfg.PARAMS.update(CPARAMS)
    BASENAMES = oggmcfg.BASENAMES
    PATHS = oggmcfg.PATHS
    PARAMS = oggmcfg.PARAMS

    # Always call this one! Creates tmp_dir etc.
    # NOTE(review): second call in this function (also called in the Paths
    # section above) — verify whether both are needed.
    oggm_static_paths()
def initialize(file=None, logging_level='INFO'):
    """Read the configuration file containing the run's parameters.

    This should be the first call, before using any of the other OGGM
    modules for most (all?) OGGM simulations.

    Parameters
    ----------
    file : str
        path to the configuration file (default: OGGM params.cfg)
    logging_level : str
        set a logging level. See :func:`set_logging_config` for options.
    """
    # Module-level state populated by this call.
    global IS_INITIALIZED
    global PARAMS
    global PATHS
    global DEMO_GLACIERS

    set_logging_config(logging_level=logging_level)

    # Fall back to the params.cfg shipped with the package
    if file is None:
        file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                            'params.cfg')

    try:
        cp = ConfigObj(file, file_error=True)
    except (ConfigObjError, IOError) as e:
        # Without a parsable config the run cannot proceed — abort hard.
        log.critical('Config file could not be parsed (%s): %s', file, e)
        sys.exit()

    log.workflow('Using configuration file: %s', file)

    # Paths
    oggm_static_paths()
    PATHS['working_dir'] = cp['working_dir']
    PATHS['dem_file'] = cp['dem_file']
    PATHS['climate_file'] = cp['climate_file']

    # Multiprocessing pool
    PARAMS['use_multiprocessing'] = cp.as_bool('use_multiprocessing')
    PARAMS['mp_processes'] = cp.as_int('mp_processes')

    # Some non-trivial params (explicitly typed; anything not listed in
    # `ltr` below is assumed to be a float)
    PARAMS['continue_on_error'] = cp.as_bool('continue_on_error')
    PARAMS['grid_dx_method'] = cp['grid_dx_method']
    PARAMS['topo_interp'] = cp['topo_interp']
    PARAMS['use_intersects'] = cp.as_bool('use_intersects')
    PARAMS['use_compression'] = cp.as_bool('use_compression')
    PARAMS['mpi_recv_buf_size'] = cp.as_int('mpi_recv_buf_size')
    PARAMS['use_multiple_flowlines'] = cp.as_bool('use_multiple_flowlines')
    PARAMS['filter_min_slope'] = cp.as_bool('filter_min_slope')
    PARAMS['auto_skip_task'] = cp.as_bool('auto_skip_task')
    PARAMS['correct_for_neg_flux'] = cp.as_bool('correct_for_neg_flux')
    PARAMS['filter_for_neg_flux'] = cp.as_bool('filter_for_neg_flux')
    PARAMS['run_mb_calibration'] = cp.as_bool('run_mb_calibration')
    PARAMS['rgi_version'] = cp['rgi_version']
    PARAMS['use_rgi_area'] = cp.as_bool('use_rgi_area')
    PARAMS['compress_climate_netcdf'] = cp.as_bool('compress_climate_netcdf')
    PARAMS['use_tar_shapefiles'] = cp.as_bool('use_tar_shapefiles')
    PARAMS['clip_mu_star'] = cp.as_bool('clip_mu_star')
    PARAMS['clip_tidewater_border'] = cp.as_bool('clip_tidewater_border')
    PARAMS['dl_verify'] = cp.as_bool('dl_verify')

    # Climate
    # baseline_climate is normalized to upper case for comparisons elsewhere
    PARAMS['baseline_climate'] = cp['baseline_climate'].strip().upper()
    PARAMS['baseline_y0'] = cp.as_int('baseline_y0')
    PARAMS['baseline_y1'] = cp.as_int('baseline_y1')
    PARAMS['hydro_month_nh'] = cp.as_int('hydro_month_nh')
    PARAMS['hydro_month_sh'] = cp.as_int('hydro_month_sh')
    PARAMS['temp_use_local_gradient'] = cp.as_bool('temp_use_local_gradient')
    PARAMS['tstar_search_glacierwide'] = cp.as_bool('tstar_search_glacierwide')
    k = 'temp_local_gradient_bounds'
    PARAMS[k] = [float(vk) for vk in cp.as_list(k)]
    k = 'tstar_search_window'
    PARAMS[k] = [int(vk) for vk in cp.as_list(k)]
    PARAMS['use_bias_for_run'] = cp.as_bool('use_bias_for_run')

    # Inversion (kept as raw string — can be a factor name, not a bool)
    k = 'use_shape_factor_for_inversion'
    PARAMS[k] = cp[k]

    # Flowline model
    k = 'use_shape_factor_for_fluxbasedmodel'
    PARAMS[k] = cp[k]

    # Make sure we have a proper cache dir
    from oggm.utils import download_oggm_files, get_demo_file
    download_oggm_files()

    # Delete non-floats: every key handled explicitly above must be removed
    # before the catch-all float loop below.
    ltr = ['working_dir', 'dem_file', 'climate_file', 'use_tar_shapefiles',
           'grid_dx_method', 'run_mb_calibration', 'compress_climate_netcdf',
           'mp_processes', 'use_multiprocessing', 'baseline_y0',
           'baseline_y1', 'temp_use_local_gradient',
           'temp_local_gradient_bounds', 'topo_interp', 'use_compression',
           'bed_shape', 'continue_on_error', 'use_multiple_flowlines',
           'tstar_search_glacierwide', 'mpi_recv_buf_size', 'hydro_month_nh',
           'clip_mu_star', 'tstar_search_window', 'use_bias_for_run',
           'hydro_month_sh', 'use_intersects', 'filter_min_slope',
           'clip_tidewater_border', 'auto_skip_task', 'correct_for_neg_flux',
           'filter_for_neg_flux', 'rgi_version', 'dl_verify',
           'use_shape_factor_for_inversion', 'use_rgi_area',
           'use_shape_factor_for_fluxbasedmodel', 'baseline_climate']
    for k in ltr:
        cp.pop(k, None)

    # Other params are floats
    for k in cp:
        PARAMS[k] = cp.as_float(k)

    # Read-in the reference t* data - maybe it will be used, maybe not
    fns = ['ref_tstars_rgi5_cru4', 'ref_tstars_rgi6_cru4',
           'ref_tstars_rgi5_histalp', 'ref_tstars_rgi6_histalp']
    for fn in fns:
        PARAMS[fn] = pd.read_csv(get_demo_file('oggm_' + fn + '.csv'))
        fpath = get_demo_file('oggm_' + fn + '_calib_params.json')
        with open(fpath, 'r') as fp:
            mbpar = json.load(fp)
        PARAMS[fn+'_calib_params'] = mbpar

    # Empty defaults
    set_intersects_db()
    IS_INITIALIZED = True

    # Pre extract cru cl to avoid problems by multiproc
    from oggm.utils import get_cru_cl_file
    get_cru_cl_file()

    # Read in the demo glaciers
    file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                        'data', 'demo_glaciers.csv')
    DEMO_GLACIERS = pd.read_csv(file, index_col=0)