Example #1
def up_to_climate(reset=False):
    """Run the tasks you want."""

    # test directory
    if not os.path.exists(_TEST_DIR):
        os.makedirs(_TEST_DIR)
    if reset:
        clean_dir(_TEST_DIR)

    if not os.path.exists(CLI_LOGF):
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('none', f)

    # Init
    cfg.initialize()

    # Use multiprocessing
    cfg.PARAMS['use_multiprocessing'] = use_multiprocessing()

    # Working dir
    cfg.PATHS['working_dir'] = _TEST_DIR
    cfg.PATHS['dem_file'] = get_demo_file('srtm_oetztal.tif')
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))

    # Read in the RGI file
    rgi_file = get_demo_file('rgi_oetztal.shp')
    rgidf = gpd.read_file(rgi_file)

    # Make a fake marine and lake terminating glacier
    cfg.PARAMS['tidewater_type'] = 4  # make lake also calve
    rgidf.loc[0, 'GlacType'] = '0199'
    rgidf.loc[1, 'GlacType'] = '0299'

    # Use RGI6
    rgidf['RGIId'] = [s.replace('RGI50', 'RGI60') for s in rgidf.RGIId]

    # Be sure data is downloaded
    cru.get_cru_cl_file()

    # Params
    cfg.PARAMS['border'] = 70
    cfg.PARAMS['tstar_search_window'] = [1902, 0]
    cfg.PARAMS['prcp_scaling_factor'] = 1.75
    cfg.PARAMS['temp_melt'] = -1.75
    cfg.PARAMS['use_kcalving_for_inversion'] = True
    cfg.PARAMS['use_kcalving_for_run'] = True

    # Go
    gdirs = workflow.init_glacier_directories(rgidf)

    try:
        tasks.catchment_width_correction(gdirs[0])
    except Exception:
        reset = True

    if reset:
        # First preprocessing tasks
        workflow.gis_prepro_tasks(gdirs)

    return gdirs
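
A minimal usage sketch (assuming the OGGM `workflow` module is imported as in the other examples): the returned directories are ready for the climate and inversion tasks used below.

gdirs = up_to_climate(reset=True)
workflow.climate_tasks(gdirs)
workflow.inversion_tasks(gdirs)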
Example #2
def test_multiple_inversion():

    # test directory
    testdir = os.path.join(get_test_dir(), 'tmp_mdir')
    if not os.path.exists(testdir):
        os.makedirs(testdir)

    # Init
    cfg.initialize()
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['border'] = 40
    cfg.PARAMS['run_mb_calibration'] = True
    cfg.PARAMS['baseline_climate'] = 'CUSTOM'
    cfg.PATHS['working_dir'] = testdir

    # Get the RGI ID
    hef_rgi = gpd.read_file(get_demo_file('divides_hef.shp'))
    hef_rgi.loc[0, 'RGIId'] = 'RGI50-11.00897'

    gdirs = workflow.init_glacier_regions(hef_rgi)
    workflow.gis_prepro_tasks(gdirs)
    workflow.climate_tasks(gdirs)
    workflow.inversion_tasks(gdirs)

    fig, ax = plt.subplots()
    graphics.plot_inversion(gdirs, ax=ax)
    fig.tight_layout()
    shutil.rmtree(testdir)
    return fig
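
The test returns the figure rather than saving it; persisting it is standard matplotlib (the file name here is illustrative):

fig = test_multiple_inversion()
fig.savefig('inversion_map.png', dpi=150)
plt.close(fig)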
Example #3
    def cfg_init(self):

        # Init
        cfg.initialize()
        cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
        cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
        cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
        cfg.PARAMS['baseline_climate'] = 'CUSTOM'
Example #4
    def setUp(self):

        # test directory
        self.testdir = os.path.join(get_test_dir(), 'tmp')
        if not os.path.exists(self.testdir):
            os.makedirs(self.testdir)
        self.clean_dir()

        # Init
        cfg.initialize()
        cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
        cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
        cfg.PATHS['working_dir'] = self.testdir
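
Several examples call a `clean_dir` helper that is not shown in this collection; a minimal sketch of what it presumably does (an assumption, not OGGM API):

import os
import shutil

def clean_dir(path):
    # Empty the directory but keep it in place
    shutil.rmtree(path)
    os.makedirs(path)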
Example #5
def up_to_climate(reset=False):
    """Run the tasks you want."""

    # test directory
    if not os.path.exists(TEST_DIR):
        os.makedirs(TEST_DIR)
    if reset:
        clean_dir(TEST_DIR)

    if not os.path.exists(CLI_LOGF):
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('none', f)

    # Init
    cfg.initialize()

    # Use multiprocessing
    cfg.PARAMS['use_multiprocessing'] = use_multiprocessing()

    # Working dir
    cfg.PATHS['working_dir'] = TEST_DIR
    cfg.PATHS['dem_file'] = get_demo_file('srtm_oetztal.tif')
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))

    # Read in the RGI file
    rgi_file = get_demo_file('rgi_oetztal.shp')
    rgidf = gpd.read_file(rgi_file)

    # Be sure data is downloaded
    cl = utils.get_cru_cl_file()

    # Params
    cfg.PARAMS['border'] = 70
    cfg.PARAMS['optimize_inversion_params'] = True
    cfg.PARAMS['use_optimized_inversion_params'] = True
    cfg.PARAMS['tstar_search_window'] = [1902, 0]
    cfg.PARAMS['run_mb_calibration'] = True

    # Go
    gdirs = workflow.init_glacier_regions(rgidf)

    try:
        tasks.catchment_width_correction(gdirs[0])
    except Exception:
        reset = True

    if reset:
        # First preprocessing tasks
        workflow.gis_prepro_tasks(gdirs)

    return gdirs
Example #6
def up_to_climate(reset=False):
    """Run the tasks you want."""

    # test directory
    if not os.path.exists(TEST_DIR):
        os.makedirs(TEST_DIR)
    if reset:
        clean_dir(TEST_DIR)

    if not os.path.exists(CLI_LOGF):
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('none', f)

    # Init
    cfg.initialize()

    # Use multiprocessing
    cfg.PARAMS['use_multiprocessing'] = use_multiprocessing()

    # Working dir
    cfg.PATHS['working_dir'] = TEST_DIR
    cfg.PATHS['dem_file'] = get_demo_file('srtm_oetztal.tif')
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))

    # Read in the RGI file
    rgi_file = get_demo_file('rgi_oetztal.shp')
    rgidf = gpd.read_file(rgi_file)

    # Be sure data is downloaded
    utils.get_cru_cl_file()

    # Params
    cfg.PARAMS['border'] = 70
    cfg.PARAMS['tstar_search_window'] = [1902, 0]
    cfg.PARAMS['run_mb_calibration'] = True

    # Go
    gdirs = workflow.init_glacier_regions(rgidf)

    try:
        tasks.catchment_width_correction(gdirs[0])
    except Exception:
        reset = True

    if reset:
        # First preprocessing tasks
        workflow.gis_prepro_tasks(gdirs)

    return gdirs
Example #7
    def setUp(self):
        # test directory
        self.testdir = os.path.join(get_test_dir(), 'tmp_g2ti')
        if not os.path.exists(self.testdir):
            os.makedirs(self.testdir)
        self.clean_dir()

        self.idir = os.path.join(g2ti.geometry_dir, 'RGI60-11',
                                 'RGI60-11.00887')

        # Init
        cfg.initialize()
        cfg.PATHS['working_dir'] = self.testdir
        cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
        gdf = cfg.PARAMS['intersects_gdf']
        gdf['RGIId_1'] = gdf['RGIId_1'].str.replace('RGI50', 'RGI60')
        gdf['RGIId_2'] = gdf['RGIId_2'].str.replace('RGI50', 'RGI60')
Example #8
def test_multiple_models():

    # test directory
    testdir = os.path.join(get_test_dir(), 'tmp_mdir')
    utils.mkdir(testdir, reset=True)

    # Init
    cfg.initialize()
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PATHS['working_dir'] = testdir
    cfg.PARAMS['baseline_climate'] = 'CUSTOM'
    cfg.PARAMS['trapezoid_lambdas'] = 1
    cfg.PARAMS['border'] = 40
    apply_test_ref_tstars()

    # Get the RGI ID
    hef_rgi = gpd.read_file(get_demo_file('divides_hef.shp'))
    hef_rgi.loc[0, 'RGIId'] = 'RGI50-11.00897'

    gdirs = workflow.init_glacier_directories(hef_rgi)
    workflow.gis_prepro_tasks(gdirs)
    workflow.climate_tasks(gdirs)
    workflow.inversion_tasks(gdirs)

    models = []
    for gdir in gdirs:
        flowline.init_present_time_glacier(gdir)
        fls = gdir.read_pickle('model_flowlines')
        models.append(flowline.FlowlineModel(fls))

    fig, ax = plt.subplots()
    graphics.plot_modeloutput_map(gdirs, ax=ax, model=models)
    fig.tight_layout()

    shutil.rmtree(testdir)
    return fig
Example #9
import geopandas as gpd
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import xarray as xr

import oggm
from oggm import cfg, tasks
from oggm.core.climate import (mb_yearly_climate_on_glacier,
                               t_star_from_refmb,
                               local_t_star, mu_star_calibration)
from oggm.core.massbalance import ConstantMassBalance
from oggm.utils import get_demo_file, gettempdir

cfg.initialize()
cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')

base_dir = gettempdir('Climate_docs')
entity = gpd.read_file(get_demo_file('HEF_MajDivide.shp')).iloc[0]
gdir = oggm.GlacierDirectory(entity, base_dir=base_dir)

tasks.define_glacier_region(gdir, entity=entity)
tasks.glacier_masks(gdir)
tasks.compute_centerlines(gdir)
tasks.initialize_flowlines(gdir)
tasks.compute_downstream_line(gdir)
tasks.catchment_area(gdir)
tasks.catchment_width_geom(gdir)
tasks.catchment_width_correction(gdir)
data_dir = get_demo_file('HISTALP_precipitation_all_abs_1801-2014.nc')
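
The snippet breaks off after fetching the HISTALP precipitation file; given the `xarray` import above, a natural next step (a sketch, not the original script) is to open it for inspection:

ds = xr.open_dataset(data_dir)
print(ds)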
Example #10
def init_glacier_regions(rgidf=None, reset=False, force=False):
    """Initializes the list of Glacier Directories for this run.

    This is the very first task to do (always). If the directories are already
    available in the working directory, use them. If not, create new ones.

    Parameters
    ----------
    rgidf : GeoDataFrame, optional for pre-computed runs
        the RGI glacier outlines. If unavailable, OGGM will parse the
        information from the glacier directories found in the working
        directory. It is required for new runs.
    reset : bool
        delete the existing glacier directories if found.
    force : bool
        setting `reset=True` will trigger a yes/no question to the user. Set
        `force=True` to avoid this.

    Returns
    -------
    a list of GlacierDirectory objects
    """

    if reset and not force:
        reset = utils.query_yes_no('Delete all glacier directories?')

    # if reset delete also the log directory
    if reset:
        fpath = os.path.join(cfg.PATHS['working_dir'], 'log')
        if os.path.exists(fpath):
            rmtree(fpath)

    gdirs = []
    new_gdirs = []
    if rgidf is None:
        if reset:
            raise ValueError('Cannot use reset without an RGI file')
        # The dirs should be there already
        gl_dir = os.path.join(cfg.PATHS['working_dir'], 'per_glacier')
        for root, _, files in os.walk(gl_dir):
            if files and ('dem.tif' in files):
                gdirs.append(oggm.GlacierDirectory(os.path.basename(root)))
    else:
        for _, entity in rgidf.iterrows():
            gdir = oggm.GlacierDirectory(entity, reset=reset)
            if not os.path.exists(gdir.get_filepath('dem')):
                new_gdirs.append((gdir, dict(entity=entity)))
            gdirs.append(gdir)

    # We can set the intersects file automatically here
    if (cfg.PARAMS['use_intersects'] and new_gdirs
            and (len(cfg.PARAMS['intersects_gdf']) == 0)):
        rgi_ids = np.unique(np.sort([t[0].rgi_id for t in new_gdirs]))
        rgi_version = new_gdirs[0][0].rgi_version
        fp = utils.get_rgi_intersects_entities(rgi_ids, version=rgi_version)
        cfg.set_intersects_db(fp)

    # If not initialized, run the task in parallel
    execute_entity_task(tasks.define_glacier_region, new_gdirs)

    return gdirs
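
Typical invocation, as used by the benchmark script in Example #11 (`reset=True, force=True` skips the interactive confirmation):

gdirs = init_glacier_regions(rgidf, reset=True, force=True)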
Example #11
def run_benchmark(rgi_version=None, rgi_reg=None, border=None,
                  output_folder='', working_dir='', is_test=False,
                  test_rgidf=None, test_intersects_file=None,
                  test_topofile=None, test_crudir=None):
    """Does the actual job.

    Parameters
    ----------
    rgi_version : str
        the RGI version to use (defaults to cfg.PARAMS)
    rgi_reg : str
        the RGI region to process
    border : int
        the number of pixels at the maps border
    output_folder : str
        path to the output folder (where to put the preprocessed tar files)
    working_dir : str
        path to the OGGM working directory
    is_test : bool
        to test on a couple of glaciers only!
    test_rgidf : shapefile
        for testing purposes only
    test_intersects_file : shapefile
        for testing purposes only
    test_topofile : str
        for testing purposes only
    test_crudir : str
        for testing purposes only
    """

    # TODO: temporarily silence Fiona deprecation warnings
    import warnings
    warnings.filterwarnings("ignore", category=DeprecationWarning)

    # Module logger
    log = logging.getLogger(__name__)

    # Initialize OGGM and set up the run parameters
    cfg.initialize(logging_level='WORKFLOW')

    # Local paths
    utils.mkdir(working_dir)
    cfg.PATHS['working_dir'] = working_dir

    # Use multiprocessing?
    cfg.PARAMS['use_multiprocessing'] = True

    # How many grid points around the glacier?
    # Make it large if you expect your glaciers to grow large
    cfg.PARAMS['border'] = border

    # Set to True for operational runs
    cfg.PARAMS['continue_on_error'] = True

    # For statistics
    odf = pd.DataFrame()

    if rgi_version is None:
        rgi_version = cfg.PARAMS['rgi_version']
    base_dir = os.path.join(output_folder)

    # Add a package version file
    utils.mkdir(base_dir)
    opath = os.path.join(base_dir, 'package_versions.txt')
    with open(opath, 'w') as vfile:
        vfile.write(utils.show_versions(logger=log))

    # Read RGI
    start = time.time()
    if test_rgidf is None:
        # Get the RGI file
        rgidf = gpd.read_file(utils.get_rgi_region_file(rgi_reg,
                                                        version=rgi_version))
        # We use intersects
        rgif = utils.get_rgi_intersects_region_file(rgi_reg,
                                                    version=rgi_version)
        cfg.set_intersects_db(rgif)
    else:
        rgidf = test_rgidf
        cfg.set_intersects_db(test_intersects_file)

    if is_test:
        # Just for fun
        rgidf = rgidf.sample(2)
    _add_time_to_df(odf, 'Read RGI', time.time()-start)

    # Sort for more efficient parallel computing
    rgidf = rgidf.sort_values('Area', ascending=False)

    log.workflow('Starting prepro run for RGI reg: {} '
                 'and border: {}'.format(rgi_reg, border))
    log.workflow('Number of glaciers: {}'.format(len(rgidf)))

    # Input
    if test_topofile:
        cfg.PATHS['dem_file'] = test_topofile

    # Initialize working directories
    start = time.time()
    gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
    _add_time_to_df(odf, 'init_glacier_regions', time.time()-start)

    # Pre-download other files just in case
    if test_crudir is None:
        _ = utils.get_cru_file(var='tmp')
        _ = utils.get_cru_file(var='pre')
    else:
        cfg.PATHS['cru_dir'] = test_crudir

    # Tasks
    task_list = [
        tasks.process_cru_data,
        tasks.glacier_masks,
        tasks.compute_centerlines,
        tasks.initialize_flowlines,
        tasks.compute_downstream_line,
        tasks.compute_downstream_bedshape,
        tasks.catchment_area,
        tasks.catchment_intersections,
        tasks.catchment_width_geom,
        tasks.catchment_width_correction,
        tasks.local_t_star,
        tasks.mu_star_calibration,
        tasks.prepare_for_inversion,
        tasks.mass_conservation_inversion,
        tasks.filter_inversion_output,
        tasks.init_present_time_glacier,
    ]
    for task in task_list:
        start = time.time()
        workflow.execute_entity_task(task, gdirs)
        _add_time_to_df(odf, task.__name__, time.time()-start)

    # Runs
    start = time.time()
    workflow.execute_entity_task(tasks.run_random_climate, gdirs,
                                 nyears=250, bias=0, seed=0,
                                 output_filesuffix='_tstar')
    _add_time_to_df(odf, 'run_random_climate_tstar_250', time.time()-start)

    start = time.time()
    workflow.execute_entity_task(tasks.run_random_climate, gdirs,
                                 nyears=250, y0=1995, seed=0,
                                 output_filesuffix='_commit')
    _add_time_to_df(odf, 'run_random_climate_commit_250', time.time()-start)

    # Compile results
    start = time.time()
    utils.compile_glacier_statistics(gdirs)
    _add_time_to_df(odf, 'compile_glacier_statistics', time.time()-start)

    start = time.time()
    utils.compile_climate_statistics(gdirs,
                                     add_climate_period=[1920, 1960, 2000])
    _add_time_to_df(odf, 'compile_climate_statistics', time.time()-start)

    start = time.time()
    utils.compile_run_output(gdirs, filesuffix='_tstar')
    _add_time_to_df(odf, 'compile_run_output_tstar', time.time()-start)

    start = time.time()
    utils.compile_run_output(gdirs, filesuffix='_commit')
    _add_time_to_df(odf, 'compile_run_output_commit', time.time()-start)

    # Log
    opath = os.path.join(base_dir, 'benchmarks_b{:03d}.csv'.format(border))
    odf.index.name = 'Task'
    odf.to_csv(opath)
    log.workflow('OGGM benchmarks are done!')
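
`run_benchmark` is designed to be driven from a wrapper or CLI; a hedged call for a small test run (paths and region are illustrative) could be:

run_benchmark(rgi_version='61', rgi_reg='11', border=80,
              output_folder='/tmp/bench_out',
              working_dir='/tmp/bench_wd',
              is_test=True)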
Example #12
    def setup_cache(self):

        setattr(full_workflow.setup_cache, "timeout", 360)

        utils.mkdir(self.testdir, reset=True)
        self.cfg_init()

        # Pre-download other files which will be needed later
        utils.get_cru_cl_file()
        utils.get_cru_file(var='tmp')
        utils.get_cru_file(var='pre')

        # Get the RGI glaciers for the run.
        rgi_list = ['RGI60-01.10299', 'RGI60-11.00897', 'RGI60-18.02342']
        rgidf = utils.get_rgi_glacier_entities(rgi_list)

        # We use intersects
        db = utils.get_rgi_intersects_region_file(version='61',
                                                  rgi_ids=rgi_list)
        cfg.set_intersects_db(db)

        # Sort for more efficient parallel computing
        rgidf = rgidf.sort_values('Area', ascending=False)

        # Go - initialize working directories
        gdirs = workflow.init_glacier_regions(rgidf)

        # Preprocessing tasks
        task_list = [
            tasks.glacier_masks,
            tasks.compute_centerlines,
            tasks.initialize_flowlines,
            tasks.compute_downstream_line,
            tasks.compute_downstream_bedshape,
            tasks.catchment_area,
            tasks.catchment_intersections,
            tasks.catchment_width_geom,
            tasks.catchment_width_correction,
        ]
        for task in task_list:
            execute_entity_task(task, gdirs)

        # Climate tasks -- only data IO and tstar interpolation!
        execute_entity_task(tasks.process_cru_data, gdirs)
        execute_entity_task(tasks.local_mustar, gdirs)
        execute_entity_task(tasks.apparent_mb, gdirs)

        # Inversion tasks
        execute_entity_task(tasks.prepare_for_inversion, gdirs)
        # We use the default parameters for this run
        execute_entity_task(tasks.mass_conservation_inversion, gdirs)
        execute_entity_task(tasks.filter_inversion_output, gdirs)

        # Final preparation for the run
        execute_entity_task(tasks.init_present_time_glacier, gdirs)

        # Random climate representative for the tstar climate, without bias
        # In an ideal world this would imply that the glaciers remain stable,
        # but it doesn't have to be so
        execute_entity_task(tasks.run_constant_climate,
                            gdirs,
                            bias=0,
                            nyears=100,
                            output_filesuffix='_tstar')

        execute_entity_task(tasks.run_constant_climate,
                            gdirs,
                            y0=1990,
                            nyears=100,
                            output_filesuffix='_pd')

        # Compile output
        utils.glacier_characteristics(gdirs)
        utils.compile_run_output(gdirs, filesuffix='_tstar')
        utils.compile_run_output(gdirs, filesuffix='_pd')
        utils.compile_climate_input(gdirs)

        return gdirs
Example #13
def init_hef(reset=False,
             border=40,
             invert_with_sliding=True,
             invert_with_rectangular=True):

    # test directory
    testdir = os.path.join(get_test_dir(), 'tmp_border{}'.format(border))
    if not invert_with_sliding:
        testdir += '_withoutslide'
    if not invert_with_rectangular:
        testdir += '_withoutrectangular'
    if not os.path.exists(testdir):
        os.makedirs(testdir)
        reset = True

    # Init
    cfg.initialize()
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['border'] = border
    cfg.PARAMS['use_optimized_inversion_params'] = True

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=reset)
    if not gdir.has_file('inversion_params'):
        reset = True
        gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=reset)

    if not reset:
        return gdir

    gis.define_glacier_region(gdir, entity=entity)
    execute_entity_task(gis.glacier_masks, [gdir])
    execute_entity_task(centerlines.compute_centerlines, [gdir])
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    climate.process_custom_climate_data(gdir)
    climate.mu_candidates(gdir)
    mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']
    res = climate.t_star_from_refmb(gdir, mbdf)
    climate.local_mustar(gdir,
                         tstar=res['t_star'][-1],
                         bias=res['bias'][-1],
                         prcp_fac=res['prcp_fac'])
    climate.apparent_mb(gdir)

    inversion.prepare_for_inversion(
        gdir,
        add_debug_var=True,
        invert_with_rectangular=invert_with_rectangular)
    ref_v = 0.573 * 1e9

    if invert_with_sliding:

        def to_optimize(x):
            # For backwards compat
            _fd = 1.9e-24 * x[0]
            glen_a = (cfg.N + 2) * _fd / 2.
            fs = 5.7e-20 * x[1]
            v, _ = inversion.mass_conservation_inversion(gdir,
                                                         fs=fs,
                                                         glen_a=glen_a)
            return (v - ref_v)**2

        out = optimization.minimize(to_optimize, [1, 1],
                                    bounds=((0.01, 10), (0.01, 10)),
                                    tol=1e-4)['x']
        _fd = 1.9e-24 * out[0]
        glen_a = (cfg.N + 2) * _fd / 2.
        fs = 5.7e-20 * out[1]
        v, _ = inversion.mass_conservation_inversion(gdir,
                                                     fs=fs,
                                                     glen_a=glen_a,
                                                     write=True)
    else:

        def to_optimize(x):
            glen_a = cfg.A * x[0]
            v, _ = inversion.mass_conservation_inversion(gdir,
                                                         fs=0.,
                                                         glen_a=glen_a)
            return (v - ref_v)**2

        out = optimization.minimize(to_optimize, [1],
                                    bounds=((0.01, 10), ),
                                    tol=1e-4)['x']
        glen_a = cfg.A * out[0]
        fs = 0.
        v, _ = inversion.mass_conservation_inversion(gdir,
                                                     fs=fs,
                                                     glen_a=glen_a,
                                                     write=True)
    d = dict(fs=fs, glen_a=glen_a)
    d['factor_glen_a'] = out[0]
    try:
        d['factor_fs'] = out[1]
    except IndexError:
        d['factor_fs'] = 0.
    gdir.write_pickle(d, 'inversion_params')

    # filter
    inversion.filter_inversion_output(gdir)

    inversion.distribute_thickness_interp(gdir, varname_suffix='_interp')
    inversion.distribute_thickness_per_altitude(gdir, varname_suffix='_alt')

    flowline.init_present_time_glacier(gdir)

    return gdir
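
The calibrated inversion parameters are pickled into the glacier directory and can be read back with `read_pickle`, the counterpart of the `write_pickle` call above:

gdir = init_hef(border=40)
inv_params = gdir.read_pickle('inversion_params')
print(inv_params['glen_a'], inv_params['fs'])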
Example #14
def init_glacier_regions(rgidf=None, *, reset=False, force=False,
                         from_prepro_level=None, prepro_border=None,
                         prepro_rgi_version=None,
                         from_tar=False, delete_tar=False,
                         use_demo_glaciers=None):
    """Initializes the list of Glacier Directories for this run.

    This is the very first task to do (always). If the directories are already
    available in the working directory, use them. If not, create new ones.

    Parameters
    ----------
    rgidf : GeoDataFrame or list of ids, optional for pre-computed runs
        the RGI glacier outlines. If unavailable, OGGM will parse the
        information from the glacier directories found in the working
        directory. It is required for new runs.
    reset : bool
        delete the existing glacier directories if found.
    force : bool
        setting `reset=True` will trigger a yes/no question to the user. Set
        `force=True` to avoid this.
    from_prepro_level : int
        get the gdir data from the official pre-processed pool. See the
        documentation for more information
    prepro_border : int
        for `from_prepro_level` only: if you want to override the default
        behavior which is to use `cfg.PARAMS['border']`
    prepro_rgi_version : str
        for `from_prepro_level` only: if you want to override the default
        behavior which is to use `cfg.PARAMS['rgi_version']`
    use_demo_glaciers : bool
        whether to check the demo glaciers for download (faster than the
        standard prepro downloads). The default is to decide whether or
        not to check based on simple criteria such as glacier list size.
    from_tar : bool, default=False
        extract the gdir data from a tar file. If set to `True`,
        will check for a tar file at the expected location in `base_dir`.
    delete_tar : bool, default=False
        delete the original tar file after extraction.

    Returns
    -------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the initialised glacier directories
    """

    if reset and not force:
        reset = utils.query_yes_no('Delete all glacier directories?')

    if prepro_border is None:
        prepro_border = int(cfg.PARAMS['border'])

    if from_prepro_level and prepro_border not in [10, 80, 160, 250]:
        if 'test' not in utils._downloads.GDIR_URL:
            raise InvalidParamsError("prepro_border or cfg.PARAMS['border'] "
                                     "should be one of: 10, 80, 160, 250.")

    # if reset delete also the log directory
    if reset:
        fpath = os.path.join(cfg.PATHS['working_dir'], 'log')
        if os.path.exists(fpath):
            rmtree(fpath)

    gdirs = []
    new_gdirs = []
    if rgidf is None:
        if reset:
            raise ValueError('Cannot use reset without setting rgidf')
        log.workflow('init_glacier_regions by parsing available folders '
                     '(can be slow).')
        # The dirs should be there already
        gl_dir = os.path.join(cfg.PATHS['working_dir'], 'per_glacier')
        for root, _, files in os.walk(gl_dir):
            if files and ('dem.tif' in files):
                gdirs.append(oggm.GlacierDirectory(os.path.basename(root)))
    else:

        # Check if dataframe or list of strs
        try:
            entities = []
            for _, entity in rgidf.iterrows():
                entities.append(entity)
        except AttributeError:
            entities = utils.tolist(rgidf)

        # Check demo
        if use_demo_glaciers is None:
            use_demo_glaciers = len(entities) < 100

        if from_prepro_level is not None:
            log.workflow('init_glacier_regions from prepro level {} on '
                         '{} glaciers.'.format(from_prepro_level,
                                               len(entities)))
            gdirs = execute_entity_task(gdir_from_prepro, entities,
                                        from_prepro_level=from_prepro_level,
                                        prepro_border=prepro_border,
                                        prepro_rgi_version=prepro_rgi_version,
                                        check_demo_glacier=use_demo_glaciers)
        else:
            # TODO: if necessary this could use multiprocessing as well
            for entity in entities:
                gdir = oggm.GlacierDirectory(entity, reset=reset,
                                             from_tar=from_tar,
                                             delete_tar=delete_tar)
                if not os.path.exists(gdir.get_filepath('dem')):
                    new_gdirs.append((gdir, dict(entity=entity)))
                gdirs.append(gdir)

    # We can set the intersects file automatically here
    if (cfg.PARAMS['use_intersects'] and new_gdirs and
            (len(cfg.PARAMS['intersects_gdf']) == 0)):
        rgi_ids = np.unique(np.sort([t[0].rgi_id for t in new_gdirs]))
        rgi_version = new_gdirs[0][0].rgi_version
        fp = utils.get_rgi_intersects_entities(rgi_ids, version=rgi_version)
        cfg.set_intersects_db(fp)

    # If not initialized, run the task in parallel
    execute_entity_task(tasks.define_glacier_region, new_gdirs)

    return gdirs
Example #15
# This is the default in OGGM
cfg.PARAMS['prcp_scaling_factor'] = 2.5

# Set to True for operational runs
cfg.CONTINUE_ON_ERROR = False
cfg.PARAMS['auto_skip_task'] = False

# Test
cfg.PARAMS['mixed_min_shape'] = 0.003
cfg.PARAMS['default_parabolic_bedshape'] = 0.003
cfg.PARAMS['trapezoid_lambdas'] = 0.

# Don't use divides for now
cfg.set_divides_db()
cfg.set_intersects_db()

# Pre-download other files which will be needed later
_ = utils.get_cru_file(var='tmp')
_ = utils.get_cru_file(var='pre')

# Copy the precalibrated tstar file
# ---------------------------------

# Note that to be exact, this procedure can only be applied if the run
# parameters don't change between the calibration and the run.
# After testing, it appears that changing the 'border' parameter won't affect
# the results much (expectedly), so that it's ok to change it. All the rest
# (e.g. smoothing, dx, prcp factor...) should imply a re-calibration

mbf = 'https://dl.dropboxusercontent.com/u/20930277/ref_tstars_b60_prcpfac_25_defaults.csv'
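
The snippet stops at the URL of the pre-calibrated t* file; a sketch of the copy step the comments describe, assuming `utils.file_downloader` is available and that OGGM expects the file as `ref_tstars.csv` in the working directory:

# Download (cached) and copy into the working directory
shutil.copyfile(utils.file_downloader(mbf),
                os.path.join(cfg.PATHS['working_dir'], 'ref_tstars.csv'))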
Example #16
# exclude
entity = entity[~entity.RGIId.isin(rm_list)]
entity = entity[~entity.RGIId.isin(rm_list2)]

# test with one glacier
#entity = entity[entity.RGIId == 'RGI60-05.01920']

# compute intersects
new_intersects = compute_intersects(entity)
# store intersects to working dir
entity_intersects_path = os.path.join(cfg.PATHS['working_dir'],
                                      'entity_intersects.shp')
new_intersects.to_file(entity_intersects_path)

# set intersect file to use
cfg.set_intersects_db(new_intersects)

# year where outlines are valid
outline_year = os.path.basename(fp)[:4]

# check geometries
#for idx, row in entity.iterrows():
#    if entity.geometry.iloc[idx].type != 'Polygon':
#        print(row['RGIId'] + row.geometry.type)

# Local working directory (where OGGM will write its output)
WORKING_DIR = utils.gettempdir('User_past_mb', reset=True)
utils.mkdir(WORKING_DIR, reset=True)
cfg.PATHS['working_dir'] = WORKING_DIR

#cfg.PATHS['working_dir'] = utils.gettempdir('user', reset=True)
Example #17
def init_hef(reset=False, border=40, logging_level='INFO'):

    from oggm.core import gis, inversion, climate, centerlines, flowline
    import geopandas as gpd

    # test directory
    testdir = os.path.join(get_test_dir(), 'tmp_border{}'.format(border))
    if not os.path.exists(testdir):
        os.makedirs(testdir)
        reset = True

    # Init
    cfg.initialize(logging_level=logging_level)
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['baseline_climate'] = ''
    cfg.PATHS['working_dir'] = testdir
    cfg.PARAMS['border'] = border

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, reset=reset)
    if not gdir.has_file('inversion_params'):
        reset = True
        gdir = oggm.GlacierDirectory(entity, reset=reset)

    if not reset:
        return gdir

    gis.define_glacier_region(gdir)
    execute_entity_task(gis.glacier_masks, [gdir])
    execute_entity_task(centerlines.compute_centerlines, [gdir])
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    climate.process_custom_climate_data(gdir)
    mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']
    res = climate.t_star_from_refmb(gdir, mbdf=mbdf)
    climate.local_t_star(gdir, tstar=res['t_star'], bias=res['bias'])
    climate.mu_star_calibration(gdir)

    inversion.prepare_for_inversion(gdir, add_debug_var=True)

    ref_v = 0.573 * 1e9

    glen_n = cfg.PARAMS['glen_n']

    def to_optimize(x):
        # For backwards compat
        _fd = 1.9e-24 * x[0]
        glen_a = (glen_n + 2) * _fd / 2.
        fs = 5.7e-20 * x[1]
        v, _ = inversion.mass_conservation_inversion(gdir,
                                                     fs=fs,
                                                     glen_a=glen_a)
        return (v - ref_v)**2

    out = optimization.minimize(to_optimize, [1, 1],
                                bounds=((0.01, 10), (0.01, 10)),
                                tol=1e-4)['x']
    _fd = 1.9e-24 * out[0]
    glen_a = (glen_n + 2) * _fd / 2.
    fs = 5.7e-20 * out[1]
    v, _ = inversion.mass_conservation_inversion(gdir,
                                                 fs=fs,
                                                 glen_a=glen_a,
                                                 write=True)

    d = dict(fs=fs, glen_a=glen_a)
    d['factor_glen_a'] = out[0]
    d['factor_fs'] = out[1]
    gdir.write_pickle(d, 'inversion_params')

    # filter
    inversion.filter_inversion_output(gdir)

    inversion.distribute_thickness_interp(gdir, varname_suffix='_interp')
    inversion.distribute_thickness_per_altitude(gdir, varname_suffix='_alt')

    flowline.init_present_time_glacier(gdir)

    return gdir
Example #18
cfg.PATHS['working_dir'] = WORKING_DIR

# Use multiprocessing?
cfg.PARAMS['use_multiprocessing'] = True

# How many grid points around the glacier?
# Make it large if you expect your glaciers to grow large
cfg.PARAMS['border'] = 160

# Set to True for operational runs
cfg.PARAMS['continue_on_error'] = True
cfg.PARAMS['auto_skip_task'] = False

# We use intersects
rgif = utils.get_rgi_intersects_region_file(rgi_reg, version=rgi_version)
cfg.set_intersects_db(rgif)

# Get the RGI file
rgidf = gpd.read_file(utils.get_rgi_region_file(rgi_reg, version=rgi_version))

# Sort for more efficient parallel computing
rgidf = rgidf.sort_values('Area', ascending=False)

# Module logger
log = logging.getLogger(__name__)
log.info('Starting run for RGI reg: ' + rgi_reg)
log.info('Number of glaciers: {}'.format(len(rgidf)))

# Go - initialize working directories
gdirs = workflow.init_glacier_regions(rgidf)
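
The scripts in this collection typically continue from here with the preprocessing and climate tasks; a sketch based on the other examples:

workflow.gis_prepro_tasks(gdirs)
workflow.execute_entity_task(tasks.process_cru_data, gdirs)
workflow.execute_entity_task(tasks.local_t_star, gdirs)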
Example #19
def run_cmip():
    # Initialize OGGM and set up the default run parameters
    vascaling.initialize(logging_level='WORKFLOW')
    rgi_version = '62'
    cfg.PARAMS['border'] = 80

    # CLUSTER paths
    wdir = os.environ.get('WORKDIR', '')
    cfg.PATHS['working_dir'] = wdir
    outdir = os.environ.get('OUTDIR', '')

    # define the baseline climate CRU or HISTALP
    cfg.PARAMS['baseline_climate'] = 'CRU'
    # set the mb hyper parameters accordingly
    cfg.PARAMS['prcp_scaling_factor'] = 3
    cfg.PARAMS['temp_melt'] = 0
    cfg.PARAMS['temp_all_solid'] = 4
    cfg.PARAMS['run_mb_calibration'] = False
    # set minimum ice thickness to include in glacier length computation
    # this reduces weird spikes in length records
    cfg.PARAMS['min_ice_thick_for_length'] = 0.1

    # the bias is defined to be zero during the calibration process,
    # which is why we don't use it here to reproduce the results
    cfg.PARAMS['use_bias_for_run'] = True

    # read RGI entry for the glaciers as DataFrame
    # containing the outline area as shapefile
    # RGI glaciers
    rgi_reg = os.environ.get('OGGM_RGI_REG', '')
    if rgi_reg not in ['{:02d}'.format(r) for r in range(1, 20)]:
        raise RuntimeError('Need an RGI Region')
    rgi_ids = gpd.read_file(
        utils.get_rgi_region_file(rgi_reg, version=rgi_version))

    # get and set path to intersect shapefile
    intersects_db = utils.get_rgi_intersects_region_file(region=rgi_reg)
    cfg.set_intersects_db(intersects_db)

    # operational run, all glaciers should run
    cfg.PARAMS['continue_on_error'] = True

    # Module logger
    log = logging.getLogger(__name__)
    log.workflow('Starting run for RGI reg {}'.format(rgi_reg))

    # Go - get the pre-processed glacier directories
    # base_url = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.4/' \
    #            'L3-L5_files/RGIV62_fleb_qc3_CRU_pcp2.5'
    prepro_dir = '/home/users/moberrauch/run_output/vas_prepro/'
    gdirs = workflow.init_glacier_directories(rgi_ids, from_tar=prepro_dir)

    # # run vascaling climate tasks
    # workflow.execute_entity_task(vascaling.local_t_star, gdirs)
    # # adjust mass balance residual with geodetic observations
    # vascaling.match_regional_geodetic_mb(gdirs=gdirs, rgi_reg=rgi_reg)
    # # prepare historic "spinup"
    # workflow.execute_entity_task(vascaling.run_from_climate_data, gdirs,
    #                              ys=2003, ye=2020,
    #                              output_filesuffix='_historical')

    # read gcm list
    gcms = pd.read_csv('/home/www/oggm/cmip6/all_gcm_list.csv', index_col=0)

    # iterate over all specified gcms
    for gcm in sys.argv[1:]:
        # iterate over all SSPs (Shared Socioeconomic Pathways)
        df1 = gcms.loc[gcms.gcm == gcm]
        for ssp in df1.ssp.unique():
            df2 = df1.loc[df1.ssp == ssp]
            assert len(df2) == 2
            # get temperature projections
            ft = df2.loc[df2['var'] == 'tas'].iloc[0]
            # get precipitation projections
            fp = df2.loc[df2['var'] == 'pr'].iloc[0].path
            rid = ft.fname.replace('_r1i1p1f1_tas.nc', '')
            ft = ft.path

            log.workflow('Starting run for {}'.format(rid))

            workflow.execute_entity_task(
                gcm_climate.process_cmip_data,
                gdirs,
                filesuffix='_' + rid,  # recognize the climate file for later
                fpath_temp=ft,  # temperature projections
                fpath_precip=fp,  # precip projections
                year_range=('1981', '2020'))

            workflow.execute_entity_task(vascaling.run_from_climate_data,
                                         gdirs,
                                         climate_filename='gcm_data',
                                         climate_input_filesuffix='_' + rid,
                                         init_model_filesuffix='_historical',
                                         output_filesuffix=rid,
                                         return_value=False)
            gcm_dir = os.path.join(outdir, 'RGI' + rgi_reg, gcm)
            utils.mkdir(gcm_dir)
            utils.compile_run_output(gdirs,
                                     input_filesuffix=rid,
                                     path=os.path.join(gcm_dir, rid + '.nc'))

    log.workflow('OGGM Done')
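
`run_cmip` reads the GCM names from `sys.argv`; a minimal entry point (GCM names illustrative) would be:

if __name__ == '__main__':
    # e.g. `python run_cmip.py CESM2 MPI-ESM1-2-HR`
    run_cmip()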
Example #20
def init_hef(reset=False, border=40):

    # test directory
    testdir = os.path.join(get_test_dir(), 'tmp_border{}'.format(border))
    if not os.path.exists(testdir):
        os.makedirs(testdir)
        reset = True

    # Init
    cfg.initialize()
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['baseline_climate'] = ''
    cfg.PATHS['working_dir'] = testdir
    cfg.PARAMS['border'] = border

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, reset=reset)
    if not gdir.has_file('inversion_params'):
        reset = True
        gdir = oggm.GlacierDirectory(entity, reset=reset)

    if not reset:
        return gdir

    gis.define_glacier_region(gdir, entity=entity)
    execute_entity_task(gis.glacier_masks, [gdir])
    execute_entity_task(centerlines.compute_centerlines, [gdir])
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    climate.process_custom_climate_data(gdir)
    mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']
    res = climate.t_star_from_refmb(gdir, mbdf=mbdf)
    climate.local_t_star(gdir, tstar=res['t_star'], bias=res['bias'])
    climate.mu_star_calibration(gdir)

    inversion.prepare_for_inversion(gdir, add_debug_var=True)

    ref_v = 0.573 * 1e9

    glen_n = cfg.PARAMS['glen_n']

    def to_optimize(x):
        # For backwards compat
        _fd = 1.9e-24 * x[0]
        glen_a = (glen_n+2) * _fd / 2.
        fs = 5.7e-20 * x[1]
        v, _ = inversion.mass_conservation_inversion(gdir, fs=fs,
                                                     glen_a=glen_a)
        return (v - ref_v)**2

    out = optimization.minimize(to_optimize, [1, 1],
                                bounds=((0.01, 10), (0.01, 10)),
                                tol=1e-4)['x']
    _fd = 1.9e-24 * out[0]
    glen_a = (glen_n+2) * _fd / 2.
    fs = 5.7e-20 * out[1]
    v, _ = inversion.mass_conservation_inversion(gdir, fs=fs,
                                                 glen_a=glen_a,
                                                 write=True)

    d = dict(fs=fs, glen_a=glen_a)
    d['factor_glen_a'] = out[0]
    d['factor_fs'] = out[1]
    gdir.write_pickle(d, 'inversion_params')

    # filter
    inversion.filter_inversion_output(gdir)

    inversion.distribute_thickness_interp(gdir, varname_suffix='_interp')
    inversion.distribute_thickness_per_altitude(gdir, varname_suffix='_alt')

    flowline.init_present_time_glacier(gdir)

    return gdir
Example #21
def configure(workdir, glclist, baselineclimate='HISTALP', resetwd=False):
    global MERGEDICT
    global GLCDICT
    global ADDITIONAL_REFERENCE_GLACIERS

    # Initialize OGGM
    cfg.initialize()
    cfg.PATHS['working_dir'] = workdir

    # Local working directory (where OGGM will write its output)
    utils.mkdir(workdir, reset=resetwd)

    # Use multiprocessing?
    cfg.PARAMS['use_multiprocessing'] = True

    # Set to True for operational runs
    cfg.PARAMS['continue_on_error'] = False

    # We use intersects
    cfg.PARAMS['use_intersects'] = True
    rgif = utils.get_rgi_intersects_region_file('11', version='60')
    cfg.set_intersects_db(rgif)

    cfg.PARAMS['use_rgi_area'] = True

    # set negative flux filtering to false. should be standard soon
    cfg.PARAMS['filter_for_neg_flux'] = False
    cfg.PARAMS['correct_for_neg_flux'] = True

    # here in relic we want to run the mb calibration every time
    cfg.PARAMS['run_mb_calibration'] = True

    # glacier length
    cfg.PARAMS['min_ice_thick_for_length'] = 1.0
    cfg.PARAMS['glacier_length_method'] = 'consecutive'

    # check if we want to merge a glacier
    mglclist = []
    for glc in glclist:
        mglc = merge_pair_dict(glc)
        if mglc is not None:
            mglclist += mglc[0]

    # How many grid points around the glacier?
    # Make it large if you expect your glaciers to grow large
    cfg.PARAMS['border'] = 160

    gdirs = workflow.init_glacier_regions(glclist + mglclist,
                                          from_prepro_level=3)

    # and we want to use all glaciers for the MB calibration
    refids = get_ref_mb_glaciers_candidates()
    # right now we only do Alpine glaciers
    refids = [rid for rid in refids if '-11.' in rid]
    # but do leave out the actual glaciers
    refids = [rid for rid in refids if rid not in glclist + mglclist]
    # I SAID ALPS, NOT PYRENEES
    refids.remove('RGI60-11.03232')
    refids.remove('RGI60-11.03209')
    refids.remove('RGI60-11.03241')
    # initialize the reference glaciers with a small border
    ref_gdirs = workflow.init_glacier_regions(rgidf=refids,
                                              from_prepro_level=3,
                                              prepro_border=10)
    # save these ids for later
    ADDITIONAL_REFERENCE_GLACIERS = refids

    # climate
    if baselineclimate == 'CRU':
        cfg.PARAMS['prcp_scaling_factor'] = 2.5
        cfg.PARAMS['temp_melt'] = -1.0

    if baselineclimate == 'HISTALP':
        cfg.PARAMS['baseline_climate'] = baselineclimate
        # and set standard histalp values
        cfg.PARAMS['prcp_scaling_factor'] = 1.75
        cfg.PARAMS['temp_melt'] = -1.75
        # run histalp climate on all glaciers!
        execute_entity_task(tasks.process_histalp_data,
                            gdirs + ref_gdirs,
                            y0=1849)

    # TODO: if I do use custom climate stuff like histalp_annual_mean:
    #   ->>>> look back at commits before 1.10.2019

    return gdirs
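
A hedged usage sketch (working directory and glacier list illustrative; the RGI ID appears in Example #12):

gdirs = configure('/tmp/relic_wd', ['RGI60-11.00897'],
                  baselineclimate='HISTALP', resetwd=True)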
Example #22
from gmd_analysis_scripts import PLOT_DIR
from oggm import utils

dir_path = os.path.join(tempfile.gettempdir(), 'fig_01')
fig_path = os.path.join(PLOT_DIR, 'workflow_tas.pdf')

cfg.initialize()
cfg.PARAMS['border'] = 20

cfg.PATHS['working_dir'] = dir_path
utils.mkdir(dir_path, reset=True)

rgidf = utils.get_rgi_glacier_entities(['RGI60-18.02342'])
entity = rgidf.iloc[0]

cfg.set_intersects_db(utils.get_rgi_intersects_entities(['RGI60-18.02342']))

gdir = oggm.GlacierDirectory(entity, base_dir=dir_path)

tasks.define_glacier_region(gdir, entity=entity)
tasks.glacier_masks(gdir)
tasks.compute_centerlines(gdir)
tasks.initialize_flowlines(gdir)
tasks.compute_downstream_line(gdir)
tasks.compute_downstream_bedshape(gdir)
tasks.catchment_area(gdir)
tasks.catchment_intersections(gdir)
tasks.catchment_width_geom(gdir)
tasks.catchment_width_correction(gdir)
tasks.process_cru_data(gdir)
tasks.local_t_star(gdir)
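
The script breaks off after `local_t_star`; the usual continuation in this collection (cf. the task list in Example #23) is:

tasks.mu_star_calibration(gdir)
tasks.prepare_for_inversion(gdir)
tasks.mass_conservation_inversion(gdir)
tasks.filter_inversion_output(gdir)
tasks.init_present_time_glacier(gdir)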
Example #23
def run_prepro_levels(rgi_version=None, rgi_reg=None, border=None,
                      output_folder='', working_dir='', is_test=False,
                      demo=False, test_rgidf=None, test_intersects_file=None,
                      test_topofile=None, test_crudir=None):
    """Does the actual job.

    Parameters
    ----------
    rgi_version : str
        the RGI version to use (defaults to cfg.PARAMS)
    rgi_reg : str
        the RGI region to process
    border : int
        the number of pixels at the maps border
    output_folder : str
        path to the output folder (where to put the preprocessed tar files)
    working_dir : str
        path to the OGGM working directory
    is_test : bool
        to test on a couple of glaciers only!
    demo : bool
        to run the prepro for the list of demo glaciers
    test_rgidf : shapefile
        for testing purposes only
    test_intersects_file : shapefile
        for testing purposes only
    test_topofile : str
        for testing purposes only
    test_crudir : str
        for testing purposes only
    """

    # TODO: temporarily silence Fiona deprecation warnings
    import warnings
    warnings.filterwarnings("ignore", category=DeprecationWarning)

    # Module logger
    log = logging.getLogger(__name__)

    # Time
    start = time.time()

    # Initialize OGGM and set up the run parameters
    cfg.initialize(logging_level='WORKFLOW')

    # Local paths
    utils.mkdir(working_dir)
    cfg.PATHS['working_dir'] = working_dir

    # Use multiprocessing?
    cfg.PARAMS['use_multiprocessing'] = True

    # How many grid points around the glacier?
    # Make it large if you expect your glaciers to grow large
    cfg.PARAMS['border'] = border

    # Set to True for operational runs
    cfg.PARAMS['continue_on_error'] = True

    # For statistics
    climate_periods = [1920, 1960, 2000]

    if rgi_version is None:
        rgi_version = cfg.PARAMS['rgi_version']
    rgi_dir_name = 'RGI{}'.format(rgi_version)
    border_dir_name = 'b_{:03d}'.format(border)
    base_dir = os.path.join(output_folder, rgi_dir_name, border_dir_name)

    # Add a package version file
    utils.mkdir(base_dir)
    opath = os.path.join(base_dir, 'package_versions.txt')
    with open(opath, 'w') as vfile:
        vfile.write(utils.show_versions(logger=log))

    if demo:
        rgidf = utils.get_rgi_glacier_entities(cfg.DEMO_GLACIERS.index)
    elif test_rgidf is None:
        # Get the RGI file
        rgidf = gpd.read_file(utils.get_rgi_region_file(rgi_reg,
                                                        version=rgi_version))
        # We use intersects
        rgif = utils.get_rgi_intersects_region_file(rgi_reg,
                                                    version=rgi_version)
        cfg.set_intersects_db(rgif)
    else:
        rgidf = test_rgidf
        cfg.set_intersects_db(test_intersects_file)

    if is_test:
        # Just for fun
        rgidf = rgidf.sample(4)

    # Sort for more efficient parallel computing
    rgidf = rgidf.sort_values('Area', ascending=False)

    log.workflow('Starting prepro run for RGI reg: {} '
                 'and border: {}'.format(rgi_reg, border))
    log.workflow('Number of glaciers: {}'.format(len(rgidf)))

    # Input
    if test_topofile:
        cfg.PATHS['dem_file'] = test_topofile

    # L1 - initialize working directories
    gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L1', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # L1 OK - compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L1')
    workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)

    # L2 - Tasks
    # Pre-download other files just in case
    if test_crudir is None:
        _ = utils.get_cru_file(var='tmp')
        _ = utils.get_cru_file(var='pre')
    else:
        cfg.PATHS['cru_dir'] = test_crudir

    workflow.execute_entity_task(tasks.process_cru_data, gdirs)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L2', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # L2 OK - compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L2')
    workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)

    # L3 - Tasks
    task_list = [
        tasks.glacier_masks,
        tasks.compute_centerlines,
        tasks.initialize_flowlines,
        tasks.compute_downstream_line,
        tasks.compute_downstream_bedshape,
        tasks.catchment_area,
        tasks.catchment_intersections,
        tasks.catchment_width_geom,
        tasks.catchment_width_correction,
        tasks.local_t_star,
        tasks.mu_star_calibration,
        tasks.prepare_for_inversion,
        tasks.mass_conservation_inversion,
        tasks.filter_inversion_output,
    ]
    for task in task_list:
        workflow.execute_entity_task(task, gdirs)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L3', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)
    opath = os.path.join(sum_dir, 'climate_statistics_{}.csv'.format(rgi_reg))
    utils.compile_climate_statistics(gdirs, add_climate_period=climate_periods,
                                     path=opath)

    # L3 OK - compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L3')
    workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)

    # L4 - Tasks
    workflow.execute_entity_task(tasks.init_present_time_glacier, gdirs)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L4', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # Copy mini data to new dir
    base_dir = os.path.join(base_dir, 'L4')
    mini_gdirs = workflow.execute_entity_task(tasks.copy_to_basedir, gdirs,
                                              base_dir=base_dir)

    # L4 OK - compress all in output directory
    workflow.execute_entity_task(utils.gdir_to_tar, mini_gdirs, delete=True)
    utils.base_dir_to_tar(base_dir)

    # Log
    m, s = divmod(time.time() - start, 60)
    h, m = divmod(m, 60)
    log.workflow('OGGM prepro_levels is done! Time needed: '
                 '{:02d}:{:02d}:{:02d}'.format(int(h), int(m), int(s)))
Example #24
    cfg.PARAMS['border'] = 200

    # Set to True for operational runs
    cfg.PARAMS['continue_on_error'] = True

    cfg.PARAMS['run_mb_calibration'] = False
    cfg.PARAMS['optimize_inversion_params'] = False
    cfg.PARAMS['dl_verify'] = True

    # add to BASENAMES
    _doc = 'contains the observed and searched glaciers from the synthetic experiment to find the initial state'
    cfg.BASENAMES['synthetic_experiment'] = ('synthetic_experiment.pkl', _doc)

    # We use intersects
    db = utils.get_rgi_intersects_region_file(version='61', region=REGION)
    cfg.set_intersects_db(db)

    # RGI file
    path = utils.get_rgi_region_file(REGION, version='61')
    rgidf = gpd.read_file(path)
    rgidf = rgidf.sort_values('Area', ascending=False)

    # exclude non-landterminating glaciers
    rgidf = rgidf[rgidf.TermType == 0]
    rgidf = rgidf[rgidf.Connect != 2]

    wgms = utils.get_ref_mb_glaciers_candidates()

    # Keep only the wgms reference glaciers
    rgidf = rgidf.loc[rgidf.RGIId.isin(wgms)]
Example #25
def init_glacier_regions(rgidf=None, *, reset=False, force=False,
                         from_prepro_level=None, prepro_border=None,
                         prepro_rgi_version=None, prepro_base_url=None,
                         from_tar=False, delete_tar=False,
                         use_demo_glaciers=None):
    """DEPRECATED: Initializes the list of Glacier Directories for this run.

    This is the very first task to do (always). If the directories are already
    available in the working directory, use them. If not, create new ones.

    Parameters
    ----------
    rgidf : GeoDataFrame or list of ids, optional for pre-computed runs
        the RGI glacier outlines. If unavailable, OGGM will parse the
        information from the glacier directories found in the working
        directory. It is required for new runs.
    reset : bool
        delete the existing glacier directories if found.
    force : bool
        setting `reset=True` will trigger a yes/no question to the user. Set
        `force=True` to avoid this.
    from_prepro_level : int
        get the gdir data from the official pre-processed pool. See the
        documentation for more information
    prepro_border : int
        for `from_prepro_level` only: if you want to override the default
        behavior which is to use `cfg.PARAMS['border']`
    prepro_rgi_version : str
        for `from_prepro_level` only: if you want to override the default
        behavior which is to use `cfg.PARAMS['rgi_version']`
    prepro_base_url : str
        for `from_prepro_level` only: if you want to override the default
        URL from which to download the gdirs. Default currently is
        https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.1/
    use_demo_glaciers : bool
        whether to check the demo glaciers for download (faster than the
        standard prepro downloads). The default is to decide whether or
        not to check based on simple criteria such as glacier list size.
    from_tar : bool, default=False
        extract the gdir data from a tar file. If set to `True`,
        will check for a tar file at the expected location in `base_dir`.
    delete_tar : bool, default=False
        delete the original tar file after extraction.

    Returns
    -------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the initialised glacier directories

    Notes
    -----
    This task is deprecated in favor of the more explicit
    init_glacier_directories. Indeed, init_glacier_directories is very
    similar to init_glacier_regions, but it does not process the DEMs:
    a glacier directory is also valid without a DEM.
    """

    _check_duplicates(rgidf)

    if reset and not force:
        reset = utils.query_yes_no('Delete all glacier directories?')

    if prepro_border is None:
        prepro_border = int(cfg.PARAMS['border'])

    if from_prepro_level and prepro_border not in [10, 80, 160, 250]:
        if 'test' not in utils._downloads.GDIR_URL:
            raise InvalidParamsError("prepro_border or cfg.PARAMS['border'] "
                                     "should be one of: 10, 80, 160, 250.")

    # if reset delete also the log directory
    if reset:
        fpath = os.path.join(cfg.PATHS['working_dir'], 'log')
        if os.path.exists(fpath):
            rmtree(fpath)

    gdirs = []
    new_gdirs = []
    if rgidf is None:
        if reset:
            raise ValueError('Cannot use reset without setting rgidf')
        log.workflow('init_glacier_regions by parsing available folders '
                     '(can be slow).')
        # The dirs should be there already
        gl_dir = os.path.join(cfg.PATHS['working_dir'], 'per_glacier')
        for root, _, files in os.walk(gl_dir):
            if files and ('dem.tif' in files):
                gdirs.append(oggm.GlacierDirectory(os.path.basename(root)))
    else:

        # Check if dataframe or list of strs
        try:
            entities = []
            for _, entity in rgidf.iterrows():
                entities.append(entity)
        except AttributeError:
            entities = utils.tolist(rgidf)

        # Check demo
        if use_demo_glaciers is None:
            use_demo_glaciers = len(entities) < 100

        if from_prepro_level is not None:
            log.workflow('init_glacier_regions from prepro level {} on '
                         '{} glaciers.'.format(from_prepro_level,
                                               len(entities)))
            # Read the hash dictionary before we use multiproc
            if cfg.PARAMS['dl_verify']:
                utils.get_dl_verify_data('cluster.klima.uni-bremen.de')
            gdirs = execute_entity_task(gdir_from_prepro, entities,
                                        from_prepro_level=from_prepro_level,
                                        prepro_border=prepro_border,
                                        prepro_rgi_version=prepro_rgi_version,
                                        check_demo_glacier=use_demo_glaciers,
                                        base_url=prepro_base_url)
        else:
            # We can set the intersects file automatically here
            if (cfg.PARAMS['use_intersects'] and
                    len(cfg.PARAMS['intersects_gdf']) == 0):
                rgi_ids = np.unique(np.sort([entity.RGIId for entity in
                                             entities]))
                rgi_version = rgi_ids[0].split('-')[0][-2:]
                fp = utils.get_rgi_intersects_entities(rgi_ids,
                                                       version=rgi_version)
                cfg.set_intersects_db(fp)

            gdirs = execute_entity_task(utils.GlacierDirectory, entities,
                                        reset=reset,
                                        from_tar=from_tar,
                                        delete_tar=delete_tar)

            for gdir in gdirs:
                if not os.path.exists(gdir.get_filepath('dem')):
                    new_gdirs.append(gdir)

    if len(new_gdirs) > 0:
        # If not initialized, run the task in parallel
        execute_entity_task(tasks.define_glacier_region, new_gdirs)

    return gdirs
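# --- Illustrative usage sketch (not part of the original function) ---
# A minimal, self-contained call pattern under assumed defaults; the working
# directory name and the use of RGI region 11 are arbitrary choices. The
# recommended replacement, workflow.init_glacier_directories, takes the same
# pre-processing arguments but leaves the DEMs alone (see Notes above).
import geopandas as gpd
from oggm import cfg, utils, workflow

cfg.initialize()
cfg.PATHS['working_dir'] = utils.gettempdir('oggm_deprecated_example')
rgidf = gpd.read_file(utils.get_rgi_region_file('11', version='61'))
gdirs = workflow.init_glacier_directories(rgidf.iloc[:2],
                                          from_prepro_level=2,
                                          prepro_border=80)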
Exemplo n.º 26
0
def seek_start_area(rgi_id,
                    name,
                    show=False,
                    path='',
                    ref=np.NaN,
                    adjust_term_elev=False,
                    legend=True,
                    instant_geometry_change=False):
    """ Set up an VAS model from scratch and run/test the start area seeking
    tasks. The result is a plot showing the modeled glacier area evolution for
    different start values. The plots can be displayed and/or stored to file.

    Parameters
    ----------
    rgi_id: string
        RGI ID denoting the glacier on which to perform the tasks
    name: string
        Name of the glacier, since it is not always given (or correct) in RGI
    show: bool, optional, default=False
        Flag deciding whether or not to show the created plots.
    path: string, optional, default=''
        Path under which the modeled area plot should be stored.
    ref: float, optional, default=np.NaN
        Historic (1851) reference area with which a reference model run is
        performed.
    adjust_term_elev: bool, optional, default=False
        Passed on to the start area seeking and model run routines.
    legend: bool, optional, default=True
        Flag deciding whether or not to add a legend to the area plot.
    instant_geometry_change: bool, optional, default=False
        Passed on to the model run routines.

    """
    # Initialization and load default parameter file
    vascaling.initialize()

    # compute RGI region and version from RGI IDs
    # assuming they are all the same
    rgi_region = (rgi_id.split('-')[-1]).split('.')[0]
    rgi_version = (rgi_id.split('-')[0])[-2:-1]

    # specify working directory and output directory
    working_dir = os.path.abspath('../working_directories/start_area/')
    # output_dir = os.path.abspath('./vas_run_output')
    output_dir = os.path.abspath('../data/vas_run_output')
    # create working directory
    utils.mkdir(working_dir, reset=False)
    utils.mkdir(output_dir)
    # set path to working directory
    cfg.PATHS['working_dir'] = working_dir
    # set RGI version and region
    cfg.PARAMS['rgi_version'] = rgi_version
    # define how many grid points to use around the glacier,
    # if you expect the glacier to grow large use a larger border
    cfg.PARAMS['border'] = 20
    # we use HistAlp climate data
    cfg.PARAMS['baseline_climate'] = 'HISTALP'
    # set the mb hyper parameters accordingly
    cfg.PARAMS['prcp_scaling_factor'] = 1.75
    cfg.PARAMS['temp_melt'] = -1.75
    cfg.PARAMS['run_mb_calibration'] = False

    # the bias is defined to be zero during the calibration process,
    # which is why we don't use it here to reproduce the results
    cfg.PARAMS['use_bias_for_run'] = True

    # get/download the RGI entity including the outline shapefile
    rgi_df = utils.get_rgi_glacier_entities([rgi_id])
    # set name, if not delivered with RGI
    if rgi_df.loc[int(rgi_id[-5:]) - 1, 'Name'] is None:
        rgi_df.loc[int(rgi_id[-5:]) - 1, 'Name'] = name

    # get and set path to intersect shapefile
    intersects_db = utils.get_rgi_intersects_region_file(region=rgi_region)
    cfg.set_intersects_db(intersects_db)

    # initialize the GlacierDirectory
    gdir = workflow.init_glacier_directories(rgi_df)[0]

    # # DEM and GIS tasks
    # # get the path to the DEM file (will download if necessary)
    # dem = utils.get_topo_file(gdir.cenlon, gdir.cenlat)
    # print('DEM source: {}, path to DEM file: {}'.format(dem[1], dem[0][0]))
    # # set path in config file
    # cfg.PATHS['dem_file'] = dem[0][0]
    # cfg.PARAMS['border'] = 10
    # cfg.PARAMS['use_intersects'] = False

    # run GIS tasks
    gis.define_glacier_region(gdir)
    gis.glacier_masks(gdir)

    # process climate data
    climate.process_climate_data(gdir)

    #  compute local t* and the corresponding mu*
    vascaling.local_t_star(gdir)

    # create mass balance model
    mb_mod = vascaling.VAScalingMassBalance(gdir)

    # look at specific mass balance over climate data period
    min_hgt, max_hgt = vascaling.get_min_max_elevation(gdir)
    y0 = 1851
    y1 = 2014

    # run scalar minimization
    minimize_res = vascaling.find_start_area(
        gdir,
        adjust_term_elev=adjust_term_elev,
        instant_geometry_change=instant_geometry_change)
    # print(minimize_res)

    # stop script if minimization was not successful
    # (note: this check is currently disabled via `and False`)
    if minimize_res.status and False:
        sys.exit(minimize_res.status)

    # instance glacier with today's values
    model_ref = vascaling.VAScalingModel(year_0=gdir.rgi_date,
                                         area_m2_0=gdir.rgi_area_m2,
                                         min_hgt=min_hgt,
                                         max_hgt=max_hgt,
                                         mb_model=mb_mod)

    # instance guessed starting areas
    num = 9
    area_guess = np.linspace(1e6,
                             np.floor(gdir.rgi_area_m2 * 2),
                             num,
                             endpoint=True)
    # create empty containers
    area_list = list()
    volume_list = list()
    spec_mb_list = list()

    # iterate over all starting areas
    for area_ in area_guess:
        # instance iteration model
        model_guess = vascaling.VAScalingModel(year_0=gdir.rgi_date,
                                               area_m2_0=gdir.rgi_area_m2,
                                               min_hgt=min_hgt,
                                               max_hgt=max_hgt,
                                               mb_model=mb_mod)
        # set new starting values
        model_guess.create_start_glacier(area_,
                                         y0,
                                         adjust_term_elev=adjust_term_elev)
        # run model and save years and area
        best_guess_ds = model_guess.run_until_and_store(
            year_end=model_ref.year,
            instant_geometry_change=instant_geometry_change)
        # create series and store in container
        area_list.append(best_guess_ds.area_m2.to_dataframe()['area_m2'])
        volume_list.append(best_guess_ds.volume_m3.to_dataframe()['volume_m3'])
        spec_mb_list.append(best_guess_ds.spec_mb.to_dataframe()['spec_mb'])

    # create DataFrame
    area_df = pd.DataFrame(
        area_list, index=['{:.2f}'.format(a / 1e6) for a in area_guess])
    area_df.index.name = 'Start Area [km$^2$]'

    volume_df = pd.DataFrame(
        volume_list, index=['{:.2f}'.format(a / 1e6) for a in area_guess])
    volume_df.index.name = 'Start Area [km$^2$]'

    # set up model with resulted starting area
    model = vascaling.VAScalingModel(year_0=model_ref.year_0,
                                     area_m2_0=model_ref.area_m2_0,
                                     min_hgt=model_ref.min_hgt_0,
                                     max_hgt=model_ref.max_hgt,
                                     mb_model=model_ref.mb_model)
    model.create_start_glacier(minimize_res.x,
                               y0,
                               adjust_term_elev=adjust_term_elev)

    # run model with best guess initial area
    best_guess_ds = model.run_until_and_store(
        year_end=model_ref.year,
        instant_geometry_change=instant_geometry_change)
    # run model with historic reference area
    if not np.isnan(ref):  # NaN (the default) means: no reference run
        model.reset()
        model.create_start_glacier(ref * 1e6,
                                   y0,
                                   adjust_term_elev=adjust_term_elev)
        ref_ds = model.run_until_and_store(
            year_end=model_ref.year,
            instant_geometry_change=instant_geometry_change)

    # create figure and add axes
    fig = plt.figure(figsize=[5, 5])
    ax = fig.add_axes([0.125, 0.075, 0.85, 0.9])

    # plot model output
    ax = (area_df / 1e6).T.plot(legend=False, colormap='Spectral', ax=ax)

    # plot best guess
    ax.plot(
        best_guess_ds.time,
        best_guess_ds.area_m2 / 1e6,
        color='k',
        ls='--',
        lw=1.2,
        label=
        f'{best_guess_ds.area_m2.isel(time=0).values/1e6:.2f} km$^2$ (best result)'
    )
    # plot reference
    if not np.isnan(ref):
        ax.plot(
            ref_ds.time,
            ref_ds.area_m2 / 1e6,
            color='k',
            ls='-.',
            lw=1.2,
            label=
            f'{ref_ds.area_m2.isel(time=0).values/1e6:.2f} km$^2$ (1850 ref.)')

    # plot 2003 reference line
    ax.axhline(
        model_ref.area_m2_0 / 1e6,
        c='k',
        ls=':',
        label=f'{model_ref.area_m2_0/1e6:.2f} km$^2$ ({gdir.rgi_date} obs.)')

    # add legend
    if legend:
        handles, labels = ax.get_legend_handles_labels()
        labels[:-3] = [r'{} km$^2$'.format(lab) for lab in labels[:-3]]
        leg = ax.legend(handles, labels, loc='upper right', ncol=2)
        # leg.set_title('Start area $A_0$', prop={'size': 12})

    # replot best guess estimate and reference (in case it lies below another
    # guess)
    ax.plot(best_guess_ds.time,
            best_guess_ds.area_m2 / 1e6,
            color='k',
            ls='--',
            lw=1.2)
    if not np.isnan(ref):
        ax.plot(ref_ds.time, ref_ds.area_m2 / 1e6, color='k', ls='-.', lw=1.2)

    # labels, title
    ax.set_xlim([best_guess_ds.time.values[0], best_guess_ds.time.values[-1]])
    ax.set_xlabel('')
    ax.set_ylabel('Glacier area [km$^2$]')

    # save figure to file
    if path:
        fig.savefig(path)

    # show plot
    if show:
        plt.show()
    plt.clf()

    # plot and store volume evolution
    (volume_df / 1e9).T.plot(legend=False, colormap='viridis')
    if path:
        plt.gcf().savefig(path[:-4] + '_volume.pdf')
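# --- Illustrative usage sketch (not part of the original function) ---
# How seek_start_area might be invoked. The RGI ID belongs to
# Hintereisferner (also used elsewhere in this collection); the output
# path is an arbitrary choice.
if __name__ == '__main__':
    seek_start_area('RGI60-11.00897', 'Hintereisferner',
                    show=False, path='hef_start_area.pdf')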
Exemplo n.º 27
0
cfg.PATHS['working_dir'] = WORKING_DIR

# Use multiprocessing?
cfg.PARAMS['use_multiprocessing'] = True

# Here we override some of the default parameters
# How many grid points around the glacier?
# Make it large if you expect your glaciers to grow large
cfg.PARAMS['border'] = 100

# Set to True for operational runs
cfg.PARAMS['continue_on_error'] = False

# We use intersects
# Here we use the global file but there are regional files too (faster)
cfg.set_intersects_db(utils.get_rgi_intersects_region_file('00', version='5'))

# Pre-download other files which will be needed later
utils.get_cru_cl_file()
utils.get_cru_file(var='tmp')
utils.get_cru_file(var='pre')

# Download the RGI file for the run
# We use a set of four glaciers here, but this could be an entire RGI region
# or any glacier list you'd like to model
dl = 'https://cluster.klima.uni-bremen.de/~fmaussion/misc/RGI_example_glaciers.zip'
with zipfile.ZipFile(utils.file_downloader(dl)) as zf:
    zf.extractall(WORKING_DIR)
rgidf = salem.read_shapefile(
    path.join(WORKING_DIR, 'RGI_example_glaciers', 'RGI_example_glaciers.shp'))
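# --- Illustrative continuation (not part of the original snippet) ---
# Typical next steps in a run script of this kind (mirroring the other
# examples in this collection): sort for more efficient parallel computing,
# then initialize the directories and run the preprocessing task chains.
from oggm import workflow  # assumed to be imported at the top of the script

rgidf = rgidf.sort_values('Area', ascending=False)
gdirs = workflow.init_glacier_directories(rgidf)
workflow.gis_prepro_tasks(gdirs)
workflow.climate_tasks(gdirs)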
Exemplo n.º 28
0
def run_prepro_levels(rgi_version=None,
                      rgi_reg=None,
                      border=None,
                      output_folder='',
                      working_dir='',
                      dem_source='',
                      is_test=False,
                      demo=False,
                      test_rgidf=None,
                      test_intersects_file=None,
                      test_topofile=None,
                      test_crudir=None,
                      disable_mp=False,
                      timeout=0,
                      max_level=4,
                      logging_level='WORKFLOW'):
    """Does the actual job.

    Parameters
    ----------
    rgi_version : str
        the RGI version to use (defaults to cfg.PARAMS)
    rgi_reg : str
        the RGI region to process
    border : int
        the number of pixels at the maps border
    output_folder : str
        path to the output folder (where to put the preprocessed tar files)
    dem_source : str
        which DEM source to use: default, SOURCE_NAME or ALL
    working_dir : str
        path to the OGGM working directory
    is_test : bool
        to test on a couple of glaciers only!
    demo : bool
        to run the prepro for the list of demo glaciers
    test_rgidf : shapefile
        for testing purposes only
    test_intersects_file : shapefile
        for testing purposes only
    test_topofile : str
        for testing purposes only
    test_crudir : str
        for testing purposes only
    disable_mp : bool
        disable multiprocessing
    max_level : int
        the maximum pre-processing level before stopping
    logging_level : str
        the logging level to use (DEBUG, INFO, WARNING, WORKFLOW)
    """

    # TODO: temporarily silence Fiona deprecation warnings
    import warnings
    warnings.filterwarnings("ignore", category=DeprecationWarning)

    # Input check
    if max_level not in [1, 2, 3, 4]:
        raise InvalidParamsError('max_level should be one of [1, 2, 3, 4]')

    # Time
    start = time.time()

    def _time_log():
        # Log util
        m, s = divmod(time.time() - start, 60)
        h, m = divmod(m, 60)
        log.workflow('OGGM prepro_levels is done! Time needed: '
                     '{:02d}:{:02d}:{:02d}'.format(int(h), int(m), int(s)))

    # Initialize OGGM and set up the run parameters
    cfg.initialize(logging_level=logging_level)

    # Local paths
    utils.mkdir(working_dir)
    cfg.PATHS['working_dir'] = working_dir

    # Use multiprocessing?
    cfg.PARAMS['use_multiprocessing'] = not disable_mp

    # How many grid points around the glacier?
    # Make it large if you expect your glaciers to grow large
    cfg.PARAMS['border'] = border

    # Set to True for operational runs
    cfg.PARAMS['continue_on_error'] = True

    # Timeout
    cfg.PARAMS['task_timeout'] = timeout

    # For statistics
    climate_periods = [1920, 1960, 2000]

    if rgi_version is None:
        rgi_version = cfg.PARAMS['rgi_version']
    rgi_dir_name = 'RGI{}'.format(rgi_version)
    border_dir_name = 'b_{:03d}'.format(border)
    base_dir = os.path.join(output_folder, rgi_dir_name, border_dir_name)

    # Add a package version file
    utils.mkdir(base_dir)
    opath = os.path.join(base_dir, 'package_versions.txt')
    with open(opath, 'w') as vfile:
        vfile.write(utils.show_versions(logger=log))

    if demo:
        rgidf = utils.get_rgi_glacier_entities(cfg.DATA['demo_glaciers'].index)
    elif test_rgidf is None:
        # Get the RGI file
        rgidf = gpd.read_file(
            utils.get_rgi_region_file(rgi_reg, version=rgi_version))
        # We use intersects
        rgif = utils.get_rgi_intersects_region_file(rgi_reg,
                                                    version=rgi_version)
        cfg.set_intersects_db(rgif)
    else:
        rgidf = test_rgidf
        cfg.set_intersects_db(test_intersects_file)

    if is_test:
        # Just for fun
        rgidf = rgidf.sample(4)

    # Sort for more efficient parallel computing
    rgidf = rgidf.sort_values('Area', ascending=False)

    log.workflow('Starting prepro run for RGI reg: {} '
                 'and border: {}'.format(rgi_reg, border))
    log.workflow('Number of glaciers: {}'.format(len(rgidf)))

    # Input
    if test_topofile:
        cfg.PATHS['dem_file'] = test_topofile

    # L1 - initialize working directories
    # Which DEM source?
    if dem_source.upper() == 'ALL':
        # This is the complex one, just do the job and leave
        log.workflow('Running prepro on ALL sources')
        for i, s in enumerate(utils.DEM_SOURCES):
            rs = i == 0
            rgidf['DEM_SOURCE'] = s
            log.workflow('Running prepro on sources: {}'.format(s))
            gdirs = workflow.init_glacier_regions(rgidf, reset=rs, force=rs)
            workflow.execute_entity_task(_rename_dem_folder, gdirs, source=s)

        # Compress all in output directory
        l_base_dir = os.path.join(base_dir, 'L1')
        workflow.execute_entity_task(utils.gdir_to_tar,
                                     gdirs,
                                     delete=False,
                                     base_dir=l_base_dir)
        utils.base_dir_to_tar(l_base_dir)

        _time_log()
        return

    if dem_source:
        # Force a given source
        rgidf['DEM_SOURCE'] = dem_source.upper()

    # L1 - go
    gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L1', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # L1 OK - compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L1')
    workflow.execute_entity_task(utils.gdir_to_tar,
                                 gdirs,
                                 delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)
    if max_level == 1:
        _time_log()
        return

    # L2 - Tasks
    # Pre-download other files just in case
    if test_crudir is None:
        _ = utils.get_cru_file(var='tmp')
        _ = utils.get_cru_file(var='pre')
    else:
        cfg.PATHS['cru_dir'] = test_crudir

    workflow.execute_entity_task(tasks.process_cru_data, gdirs)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L2', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # L2 OK - compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L2')
    workflow.execute_entity_task(utils.gdir_to_tar,
                                 gdirs,
                                 delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)
    if max_level == 2:
        _time_log()
        return

    # L3 - Tasks
    task_list = [
        tasks.glacier_masks, tasks.compute_centerlines,
        tasks.initialize_flowlines, tasks.compute_downstream_line,
        tasks.compute_downstream_bedshape, tasks.catchment_area,
        tasks.catchment_intersections, tasks.catchment_width_geom,
        tasks.catchment_width_correction, tasks.local_t_star,
        tasks.mu_star_calibration, tasks.prepare_for_inversion,
        tasks.mass_conservation_inversion, tasks.filter_inversion_output,
        tasks.init_present_time_glacier
    ]
    for task in task_list:
        workflow.execute_entity_task(task, gdirs)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L3', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)
    opath = os.path.join(sum_dir, 'climate_statistics_{}.csv'.format(rgi_reg))
    utils.compile_climate_statistics(gdirs,
                                     add_climate_period=climate_periods,
                                     path=opath)

    # L3 OK - compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L3')
    workflow.execute_entity_task(utils.gdir_to_tar,
                                 gdirs,
                                 delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)
    if max_level == 3:
        _time_log()
        return

    # L4 - No tasks: add some stats for consistency and make the dirs small
    sum_dir = os.path.join(base_dir, 'L4', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # Copy mini data to new dir
    base_dir = os.path.join(base_dir, 'L4')
    mini_gdirs = workflow.execute_entity_task(tasks.copy_to_basedir,
                                              gdirs,
                                              base_dir=base_dir)

    # L4 OK - compress all in output directory
    workflow.execute_entity_task(utils.gdir_to_tar, mini_gdirs, delete=True)
    utils.base_dir_to_tar(base_dir)

    _time_log()
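# --- Illustrative usage sketch (not part of the original function) ---
# A small test invocation of the function above; all argument values are
# assumptions chosen for a quick local run (is_test samples 4 glaciers).
if __name__ == '__main__':
    run_prepro_levels(rgi_version='61', rgi_reg='11', border=80,
                      output_folder='./prepro_out', working_dir='./wdir',
                      is_test=True, max_level=2)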
Exemplo n.º 29
0
def run_prepro_levels(rgi_version=None,
                      rgi_reg=None,
                      border=None,
                      output_folder='',
                      working_dir='',
                      dem_source='',
                      is_test=False,
                      test_ids=None,
                      demo=False,
                      test_rgidf=None,
                      test_intersects_file=None,
                      test_topofile=None,
                      disable_mp=False,
                      params_file=None,
                      elev_bands=False,
                      match_regional_geodetic_mb=False,
                      match_geodetic_mb_per_glacier=False,
                      evolution_model='fl_sia',
                      centerlines_only=False,
                      override_params=None,
                      add_consensus=False,
                      start_level=None,
                      start_base_url=None,
                      max_level=5,
                      ref_tstars_base_url='',
                      logging_level='WORKFLOW',
                      disable_dl_verify=False,
                      dynamic_spinup=False,
                      continue_on_error=True):
    """Generate the preprocessed OGGM glacier directories for this OGGM version

    Parameters
    ----------
    rgi_version : str
        the RGI version to use (defaults to cfg.PARAMS)
    rgi_reg : str
        the RGI region to process
    border : int
        the number of pixels at the maps border
    output_folder : str
        path to the output folder (where to put the preprocessed tar files)
    dem_source : str
        which DEM source to use: default, SOURCE_NAME or ALL
    working_dir : str
        path to the OGGM working directory
    ref_tstars_base_url : str
        url where to find the pre-calibrated reference tstar list.
        Required as of v1.4.
    params_file : str
        path to the OGGM parameter file (to override defaults)
    is_test : bool
        to test on a couple of glaciers only!
    test_ids : list
        if is_test: list of ids to process
    demo : bool
        to run the prepro for the list of demo glaciers
    test_rgidf : shapefile
        for testing purposes only
    test_intersects_file : shapefile
        for testing purposes only
    test_topofile : str
        for testing purposes only
    disable_mp : bool
        disable multiprocessing
    elev_bands : bool
        compute all flowlines based on the Huss&Hock 2015 method instead
        of the OGGM default, which is a mix of elev_bands and centerlines.
    centerlines_only : bool
        compute all flowlines based on the OGGM centerline(s) method instead
        of the OGGM default, which is a mix of elev_bands and centerlines.
    match_regional_geodetic_mb : str
        match the regional mass-balance estimates at the regional level
        ('hugonnet': Hugonnet et al., 2020 or 'zemp': Zemp et al., 2019).
    match_geodetic_mb_per_glacier : str
        match the mass-balance estimates at the glacier level
        (currently only 'hugonnet': Hugonnet et al., 2020).
    evolution_model : str
        which geometry evolution model to use: `fl_sia` (default),
        or `massredis` (mass redistribution curve).
    add_consensus : bool
        adds (reprojects) the consensus estimates thickness to the glacier
        directories. With elev_bands=True, the data will also be binned.
    start_level : int
        the pre-processed level to start from (default is to start from
        scratch). If set, you'll need to indicate start_base_url as well.
    start_base_url : str
        the pre-processed base-url to fetch the data from.
    max_level : int
        the maximum pre-processing level before stopping
    logging_level : str
        the logging level to use (DEBUG, INFO, WARNING, WORKFLOW)
    override_params : dict
        a dict of parameters to override.
    disable_dl_verify : bool
        disable the hash verification of OGGM downloads
    dynamic_spinup: str
        include a dynamic spinup matching 'area' OR 'volume' at the RGI-date
    """

    # Input check
    if max_level not in [1, 2, 3, 4, 5]:
        raise InvalidParamsError('max_level should be one of [1, 2, 3, 4, 5]')

    if start_level is not None:
        if start_level not in [0, 1, 2]:
            raise InvalidParamsError('start_level should be one of [0, 1, 2]')
        if start_level > 0 and start_base_url is None:
            raise InvalidParamsError('With start_level, please also indicate '
                                     'start_base_url')
    else:
        start_level = 0

    if match_regional_geodetic_mb and match_geodetic_mb_per_glacier:
        raise InvalidParamsError(
            'match_regional_geodetic_mb incompatible with '
            'match_geodetic_mb_per_glacier!')

    if match_geodetic_mb_per_glacier and match_geodetic_mb_per_glacier != 'hugonnet':
        raise InvalidParamsError('Currently only `hugonnet` is available for '
                                 'match_geodetic_mb_per_glacier.')

    if evolution_model not in ['fl_sia', 'massredis']:
        raise InvalidParamsError('evolution_model should be one of '
                                 "['fl_sia', 'massredis'].")

    if dynamic_spinup and dynamic_spinup not in ['area', 'volume']:
        raise InvalidParamsError(f"Dynamic spinup option '{dynamic_spinup}' "
                                 "not supported")

    if dynamic_spinup and evolution_model == 'massredis':
        raise InvalidParamsError("Dynamic spinup is not working/tested"
                                 "with massredis!")

    # Time
    start = time.time()

    def _time_log():
        # Log util
        m, s = divmod(time.time() - start, 60)
        h, m = divmod(m, 60)
        log.workflow('OGGM prepro_levels is done! Time needed: '
                     '{:02d}:{:02d}:{:02d}'.format(int(h), int(m), int(s)))

    # Local paths
    if override_params is None:
        override_params = {}

    utils.mkdir(working_dir)
    override_params['working_dir'] = working_dir

    # Initialize OGGM and set up the run parameters
    cfg.initialize(file=params_file,
                   params=override_params,
                   logging_level=logging_level,
                   future=True)

    if match_geodetic_mb_per_glacier and (cfg.PARAMS['hydro_month_nh'] != 1 or
                                          cfg.PARAMS['hydro_month_sh'] != 1):
        raise InvalidParamsError('We recommend setting hydro_month_nh and sh '
                                 'to 1 for the geodetic MB calibration per '
                                 'glacier.')

    # Use multiprocessing?
    cfg.PARAMS['use_multiprocessing'] = not disable_mp

    # How many grid points around the glacier?
    # Make it large if you expect your glaciers to grow large
    cfg.PARAMS['border'] = border

    # Set to True for operational runs
    cfg.PARAMS['continue_on_error'] = continue_on_error

    # Check for the integrity of the files OGGM downloads at run time
    # For large files (e.g. using a 1 tif DEM like ALASKA) calculating the hash
    # takes a long time, so deactivating this can make sense
    cfg.PARAMS['dl_verify'] = not disable_dl_verify

    # Other things that make sense
    cfg.PARAMS['store_model_geometry'] = True

    # Log the parameters
    msg = '# OGGM Run parameters:'
    for k, v in cfg.PARAMS.items():
        if type(v) in [pd.DataFrame, dict]:
            continue
        msg += '\n    {}: {}'.format(k, v)
    log.workflow(msg)

    if rgi_version is None:
        rgi_version = cfg.PARAMS['rgi_version']
    output_base_dir = os.path.join(output_folder, 'RGI{}'.format(rgi_version),
                                   'b_{:03d}'.format(border))

    # Add a package version file
    utils.mkdir(output_base_dir)
    opath = os.path.join(output_base_dir, 'package_versions.txt')
    with open(opath, 'w') as vfile:
        vfile.write(utils.show_versions(logger=log))

    if demo:
        rgidf = utils.get_rgi_glacier_entities(cfg.DATA['demo_glaciers'].index)
    elif test_rgidf is None:
        # Get the RGI file
        rgidf = gpd.read_file(
            utils.get_rgi_region_file(rgi_reg, version=rgi_version))
        # We use intersects
        rgif = utils.get_rgi_intersects_region_file(rgi_reg,
                                                    version=rgi_version)
        cfg.set_intersects_db(rgif)

        # Some RGI input quality checks - this is based on visual checks
        # of large glaciers in the RGI
        ids_to_ice_cap = [
            'RGI60-05.10315',  # huge Greenland ice cap
            'RGI60-03.01466',  # strange thing next to Devon
            'RGI60-09.00918',  # Academy of sciences Ice cap
            'RGI60-09.00969',
            'RGI60-09.00958',
            'RGI60-09.00957',
        ]
        rgidf.loc[rgidf.RGIId.isin(ids_to_ice_cap), 'Form'] = '1'

        # In AA almost all large ice bodies are actually ice caps
        if rgi_reg == '19':
            rgidf.loc[rgidf.Area > 100, 'Form'] = '1'

        # For greenland we omit connectivity level 2
        if rgi_reg == '05':
            rgidf = rgidf.loc[rgidf['Connect'] != 2]
    else:
        rgidf = test_rgidf
        cfg.set_intersects_db(test_intersects_file)

    if is_test:
        if test_ids is not None:
            rgidf = rgidf.loc[rgidf.RGIId.isin(test_ids)]
        else:
            rgidf = rgidf.sample(4)

        if max_level > 2:
            # Also use ref tstars
            utils.apply_test_ref_tstars()

    if max_level > 2 and ref_tstars_base_url:
        workflow.download_ref_tstars(base_url=ref_tstars_base_url)

    log.workflow('Starting prepro run for RGI reg: {} '
                 'and border: {}'.format(rgi_reg, border))
    log.workflow('Number of glaciers: {}'.format(len(rgidf)))

    # L0 - go
    if start_level == 0:
        gdirs = workflow.init_glacier_directories(rgidf,
                                                  reset=True,
                                                  force=True)

        # Glacier stats
        sum_dir = os.path.join(output_base_dir, 'L0', 'summary')
        utils.mkdir(sum_dir)
        opath = os.path.join(sum_dir,
                             'glacier_statistics_{}.csv'.format(rgi_reg))
        utils.compile_glacier_statistics(gdirs, path=opath)

        # L0 OK - compress all in output directory
        log.workflow('L0 done. Writing to tar...')
        level_base_dir = os.path.join(output_base_dir, 'L0')
        workflow.execute_entity_task(utils.gdir_to_tar,
                                     gdirs,
                                     delete=False,
                                     base_dir=level_base_dir)
        utils.base_dir_to_tar(level_base_dir)
        if max_level == 0:
            _time_log()
            return
    else:
        gdirs = workflow.init_glacier_directories(
            rgidf,
            reset=True,
            force=True,
            from_prepro_level=start_level,
            prepro_border=border,
            prepro_rgi_version=rgi_version,
            prepro_base_url=start_base_url)

    # L1 - Add dem files
    if start_level == 0:
        if test_topofile:
            cfg.PATHS['dem_file'] = test_topofile

        # Which DEM source?
        if dem_source.upper() == 'ALL':
            # This is the complex one, just do the job and leave
            log.workflow('Running prepro on ALL sources')
            for i, s in enumerate(utils.DEM_SOURCES):
                rs = i == 0
                log.workflow('Running prepro on sources: {}'.format(s))
                gdirs = workflow.init_glacier_directories(rgidf,
                                                          reset=rs,
                                                          force=rs)
                workflow.execute_entity_task(tasks.define_glacier_region,
                                             gdirs,
                                             source=s)
                workflow.execute_entity_task(_rename_dem_folder,
                                             gdirs,
                                             source=s)

            # make a GeoTiff mask of the glacier, choose any source
            workflow.execute_entity_task(gis.rasterio_glacier_mask,
                                         gdirs,
                                         source='ALL')

            # Compress all in output directory
            level_base_dir = os.path.join(output_base_dir, 'L1')
            workflow.execute_entity_task(utils.gdir_to_tar,
                                         gdirs,
                                         delete=False,
                                         base_dir=level_base_dir)
            utils.base_dir_to_tar(level_base_dir)

            _time_log()
            return

        # Force a given source
        source = dem_source.upper() if dem_source else None

        # L1 - go
        workflow.execute_entity_task(tasks.define_glacier_region,
                                     gdirs,
                                     source=source)

        # Glacier stats
        sum_dir = os.path.join(output_base_dir, 'L1', 'summary')
        utils.mkdir(sum_dir)
        opath = os.path.join(sum_dir,
                             'glacier_statistics_{}.csv'.format(rgi_reg))
        utils.compile_glacier_statistics(gdirs, path=opath)

        # L1 OK - compress all in output directory
        log.workflow('L1 done. Writing to tar...')
        level_base_dir = os.path.join(output_base_dir, 'L1')
        workflow.execute_entity_task(utils.gdir_to_tar,
                                     gdirs,
                                     delete=False,
                                     base_dir=level_base_dir)
        utils.base_dir_to_tar(level_base_dir)
        if max_level == 1:
            _time_log()
            return

    # L2 - Tasks
    if start_level <= 1:
        # Check which glaciers will be processed as what
        if elev_bands:
            gdirs_band = gdirs
            gdirs_cent = []
        elif centerlines_only:
            gdirs_band = []
            gdirs_cent = gdirs
        else:
            # The default is centerlines only, but it used to be a mix
            # (e.g. elevation bands for ice caps, etc.)
            # We keep this logic here in case we want to mix again
            gdirs_band = []
            gdirs_cent = gdirs

        log.workflow('Start flowline processing with: '
                     'N centerline type: {}, '
                     'N elev bands type: {}.'
                     ''.format(len(gdirs_cent), len(gdirs_band)))

        # HH2015 method
        workflow.execute_entity_task(tasks.simple_glacier_masks, gdirs_band)

        # Centerlines OGGM
        workflow.execute_entity_task(tasks.glacier_masks, gdirs_cent)

        if add_consensus:
            from oggm.shop.bedtopo import add_consensus_thickness
            workflow.execute_entity_task(add_consensus_thickness, gdirs_band)
            workflow.execute_entity_task(add_consensus_thickness, gdirs_cent)

            # Elev bands with var data
            vn = 'consensus_ice_thickness'
            workflow.execute_entity_task(tasks.elevation_band_flowline,
                                         gdirs_band,
                                         bin_variables=vn)
            workflow.execute_entity_task(
                tasks.fixed_dx_elevation_band_flowline,
                gdirs_band,
                bin_variables=vn)
        else:
            # HH2015 method without it
            task_list = [
                tasks.elevation_band_flowline,
                tasks.fixed_dx_elevation_band_flowline,
            ]
            for task in task_list:
                workflow.execute_entity_task(task, gdirs_band)

        # Centerlines OGGM
        task_list = [
            tasks.compute_centerlines,
            tasks.initialize_flowlines,
            tasks.catchment_area,
            tasks.catchment_intersections,
            tasks.catchment_width_geom,
            tasks.catchment_width_correction,
        ]
        for task in task_list:
            workflow.execute_entity_task(task, gdirs_cent)

        # Same for all glaciers
        if border >= 20:
            task_list = [
                tasks.compute_downstream_line,
                tasks.compute_downstream_bedshape,
            ]
            for task in task_list:
                workflow.execute_entity_task(task, gdirs)
        else:
            log.workflow("L2: for map border values < 20, won't compute "
                         "downstream lines.")

        # Glacier stats
        sum_dir = os.path.join(output_base_dir, 'L2', 'summary')
        utils.mkdir(sum_dir)
        opath = os.path.join(sum_dir,
                             'glacier_statistics_{}.csv'.format(rgi_reg))
        utils.compile_glacier_statistics(gdirs, path=opath)

        # And for level 2: shapes
        if len(gdirs_cent) > 0:
            opath = os.path.join(sum_dir, 'centerlines_{}.shp'.format(rgi_reg))
            utils.write_centerlines_to_shape(gdirs_cent,
                                             to_tar=True,
                                             path=opath)

        # L2 OK - compress all in output directory
        log.workflow('L2 done. Writing to tar...')
        level_base_dir = os.path.join(output_base_dir, 'L2')
        workflow.execute_entity_task(utils.gdir_to_tar,
                                     gdirs,
                                     delete=False,
                                     base_dir=level_base_dir)
        utils.base_dir_to_tar(level_base_dir)
        if max_level == 2:
            _time_log()
            return

    # L3 - Tasks
    sum_dir = os.path.join(output_base_dir, 'L3', 'summary')
    utils.mkdir(sum_dir)

    # Climate
    workflow.execute_entity_task(tasks.process_climate_data, gdirs)

    if cfg.PARAMS['climate_qc_months'] > 0:
        workflow.execute_entity_task(tasks.historical_climate_qc, gdirs)

    if match_geodetic_mb_per_glacier:
        utils.get_geodetic_mb_dataframe()  # Small optim to avoid concurrency
        workflow.execute_entity_task(
            tasks.mu_star_calibration_from_geodetic_mb, gdirs)
        workflow.execute_entity_task(tasks.apparent_mb_from_any_mb, gdirs)
    else:
        workflow.execute_entity_task(tasks.local_t_star, gdirs)
        workflow.execute_entity_task(tasks.mu_star_calibration, gdirs)

    # Inversion: we match the consensus
    do_filter = border >= 20
    workflow.calibrate_inversion_from_consensus(
        gdirs,
        apply_fs_on_mismatch=True,
        error_on_mismatch=False,
        filter_inversion_output=do_filter)

    # Do we want to match geodetic estimates?
    # This affects only the bias so we can actually do this *after*
    # the inversion, but we really want to take calving into account here
    if match_regional_geodetic_mb:
        opath = os.path.join(
            sum_dir, 'fixed_geometry_mass_balance_'
            'before_match_{}.csv'.format(rgi_reg))
        utils.compile_fixed_geometry_mass_balance(gdirs, path=opath)
        workflow.match_regional_geodetic_mb(gdirs,
                                            rgi_reg=rgi_reg,
                                            dataset=match_regional_geodetic_mb)

    # We get ready for modelling
    if border >= 20:
        workflow.execute_entity_task(tasks.init_present_time_glacier, gdirs)
    else:
        log.workflow(
            "L3: for map border values < 20, won't initialize glaciers "
            "for the run.")
    # Glacier stats
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)
    opath = os.path.join(sum_dir, 'climate_statistics_{}.csv'.format(rgi_reg))
    utils.compile_climate_statistics(gdirs, path=opath)
    opath = os.path.join(sum_dir,
                         'fixed_geometry_mass_balance_{}.csv'.format(rgi_reg))
    utils.compile_fixed_geometry_mass_balance(gdirs, path=opath)

    # L3 OK - compress all in output directory
    log.workflow('L3 done. Writing to tar...')
    level_base_dir = os.path.join(output_base_dir, 'L3')
    workflow.execute_entity_task(utils.gdir_to_tar,
                                 gdirs,
                                 delete=False,
                                 base_dir=level_base_dir)
    utils.base_dir_to_tar(level_base_dir)
    if max_level == 3:
        _time_log()
        return
    if border < 20:
        log.workflow("L3: for map border values < 20, won't compute "
                     "L4 and L5.")
        _time_log()
        return

    # L4 - No tasks: add some stats for consistency and make the dirs small
    sum_dir_L3 = sum_dir
    sum_dir = os.path.join(output_base_dir, 'L4', 'summary')
    utils.mkdir(sum_dir)

    # Copy L3 files for consistency
    for bn in [
            'glacier_statistics', 'climate_statistics',
            'fixed_geometry_mass_balance'
    ]:
        ipath = os.path.join(sum_dir_L3, bn + '_{}.csv'.format(rgi_reg))
        opath = os.path.join(sum_dir, bn + '_{}.csv'.format(rgi_reg))
        shutil.copyfile(ipath, opath)

    # Copy mini data to new dir
    mini_base_dir = os.path.join(working_dir, 'mini_perglacier',
                                 'RGI{}'.format(rgi_version),
                                 'b_{:03d}'.format(border))
    mini_gdirs = workflow.execute_entity_task(tasks.copy_to_basedir,
                                              gdirs,
                                              base_dir=mini_base_dir)

    # L4 OK - compress all in output directory
    log.workflow('L4 done. Writing to tar...')
    level_base_dir = os.path.join(output_base_dir, 'L4')
    workflow.execute_entity_task(utils.gdir_to_tar,
                                 mini_gdirs,
                                 delete=False,
                                 base_dir=level_base_dir)
    utils.base_dir_to_tar(level_base_dir)
    if max_level == 4:
        _time_log()
        return

    # L5 - spinup run in mini gdirs
    gdirs = mini_gdirs

    # Get end date. The first gdir might have blown up, try some others
    i = 0
    while True:
        if i >= len(gdirs):
            raise RuntimeError('Found no valid glaciers!')
        try:
            y0 = gdirs[i].get_climate_info()['baseline_hydro_yr_0']
            # One adds 1 because the run ends at the end of the year
            ye = gdirs[i].get_climate_info()['baseline_hydro_yr_1'] + 1
            break
        except BaseException:
            i += 1

    # Which model?
    if evolution_model == 'massredis':
        from oggm.core.flowline import MassRedistributionCurveModel
        evolution_model = MassRedistributionCurveModel
    else:
        from oggm.core.flowline import FluxBasedModel
        evolution_model = FluxBasedModel

    # OK - run
    if dynamic_spinup:
        workflow.execute_entity_task(
            tasks.run_dynamic_spinup,
            gdirs,
            evolution_model=evolution_model,
            minimise_for=dynamic_spinup,
            precision_percent=1,
            output_filesuffix='_dynamic_spinup',
        )
        workflow.execute_entity_task(tasks.run_from_climate_data,
                                     gdirs,
                                     min_ys=y0,
                                     ye=ye,
                                     evolution_model=evolution_model,
                                     init_model_filesuffix='_dynamic_spinup',
                                     output_filesuffix='_hist_spin')
        workflow.execute_entity_task(tasks.merge_consecutive_run_outputs,
                                     gdirs,
                                     input_filesuffix_1='_dynamic_spinup',
                                     input_filesuffix_2='_hist_spin',
                                     output_filesuffix='_historical_spinup',
                                     delete_input=True)

    workflow.execute_entity_task(tasks.run_from_climate_data,
                                 gdirs,
                                 min_ys=y0,
                                 ye=ye,
                                 evolution_model=evolution_model,
                                 output_filesuffix='_historical')

    # Now compile the output
    sum_dir = os.path.join(output_base_dir, 'L5', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, f'historical_run_output_{rgi_reg}.nc')
    utils.compile_run_output(gdirs, path=opath, input_filesuffix='_historical')

    if dynamic_spinup:
        opath = os.path.join(sum_dir,
                             f'historical_spinup_run_output_{rgi_reg}.nc')
        utils.compile_run_output(gdirs,
                                 path=opath,
                                 input_filesuffix='_historical_spinup')

    # Glacier statistics we recompute here for error analysis
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # Other stats for consistency
    for bn in ['climate_statistics', 'fixed_geometry_mass_balance']:
        ipath = os.path.join(sum_dir_L3, bn + '_{}.csv'.format(rgi_reg))
        opath = os.path.join(sum_dir, bn + '_{}.csv'.format(rgi_reg))
        shutil.copyfile(ipath, opath)

    # Add the extended files
    pf = os.path.join(sum_dir, 'historical_run_output_{}.nc'.format(rgi_reg))
    mf = os.path.join(sum_dir,
                      'fixed_geometry_mass_balance_{}.csv'.format(rgi_reg))
    # This is crucial - extending calving only possible with L3 data!!!
    sf = os.path.join(sum_dir_L3, 'glacier_statistics_{}.csv'.format(rgi_reg))
    opath = os.path.join(
        sum_dir, 'historical_run_output_extended_{}.nc'.format(rgi_reg))
    utils.extend_past_climate_run(past_run_file=pf,
                                  fixed_geometry_mb_file=mf,
                                  glacier_statistics_file=sf,
                                  path=opath)

    # L5 OK - compress all in output directory
    log.workflow('L5 done. Writing to tar...')
    level_base_dir = os.path.join(output_base_dir, 'L5')
    workflow.execute_entity_task(utils.gdir_to_tar,
                                 gdirs,
                                 delete=False,
                                 base_dir=level_base_dir)
    utils.base_dir_to_tar(level_base_dir)

    _time_log()
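# --- Illustrative usage sketch (not part of the original function) ---
# Example call of this newer variant; all values are assumptions. With
# is_test=True and max_level > 2, the function applies the test reference
# tstars itself, so no ref_tstars_base_url is needed for this sketch.
if __name__ == '__main__':
    run_prepro_levels(rgi_version='62', rgi_reg='11', border=160,
                      output_folder='./prepro_out', working_dir='./wdir',
                      elev_bands=True, dynamic_spinup='area',
                      is_test=True, max_level=5)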
Exemplo n.º 30
0
# Make it large if you expect your glaciers to grow large
cfg.PARAMS['border'] = 80
cfg.PARAMS['optimize_inversion_params'] = True

# Set to True for cluster runs
if Cluster:
    cfg.PARAMS['use_multiprocessing'] = True
    cfg.PARAMS['continue_on_error'] = True
else:
    cfg.PARAMS['use_multiprocessing'] = False
    cfg.PARAMS['continue_on_error'] = False

# We use intersects
# (this is slow, it could be replaced with a subset of the global file)
rgi_dir = utils.get_rgi_intersects_dir()
cfg.set_intersects_db(os.path.join(rgi_dir, '00_rgi50_AllRegs',
                                   'intersects_rgi50_AllRegs.shp'))

# Pre-download other files which will be needed later
utils.get_cru_cl_file()
utils.get_cru_file(var='tmp')
utils.get_cru_file(var='pre')

# Some globals for more control on what to run
RUN_GIS_mask = False
RUN_GIS_PREPRO = False  # run GIS pre-processing tasks (before climate)
RUN_CLIMATE_PREPRO = False  # run climate pre-processing tasks
RUN_INVERSION = False  # run bed inversion

# Read RGI file
rgidf = salem.read_shapefile(RGI_FILE, cached=True)
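# --- Illustrative continuation (not part of the original snippet) ---
# Sketch of how the RUN_* switches above are typically used to gate the
# workflow; the task calls follow the other examples in this collection.
from oggm import tasks, workflow  # assumed to be imported at the top

gdirs = workflow.init_glacier_directories(rgidf)
if RUN_GIS_mask:
    workflow.execute_entity_task(tasks.glacier_masks, gdirs)
if RUN_GIS_PREPRO:
    workflow.gis_prepro_tasks(gdirs)
if RUN_CLIMATE_PREPRO:
    workflow.climate_tasks(gdirs)
if RUN_INVERSION:
    workflow.inversion_tasks(gdirs)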
Exemplo n.º 31
0
def equilibrium_run_vas(rgi_ids,
                        use_random_mb=True,
                        path=True,
                        temp_biases=(0, +0.5, -0.5),
                        use_bias_for_run=False,
                        suffixes=('_bias_zero', '_bias_p', '_bias_n'),
                        tstar=None,
                        vas_c_length_m=None,
                        vas_c_area_m2=None,
                        **kwargs):
    """ The routine runs all steps for the equilibrium experiments using the
    volume/area scaling model:
    - OGGM preprocessing, including initialization, GIS tasks, climate tasks and
      massbalance tasks.
    - Run model for all glaciers with constant (or random) massbalance model
      over 3000 years (default value).
    - Process the model output dataset(s), i.e. normalization, average/sum, ...

    The final dataset containing all results is returned. Given a path, it is
    also stored to file.

    Parameters
    ----------
    rgi_ids: array-like
        List of RGI IDs for which the equilibrium experiments are performed.
    use_random_mb: bool, optional, default=True
        Choose between random massbalance model and constant massbalance model.
    path: bool or str, optional, default=True
        If a path is given (or True), the resulting dataset is stored to file.
    temp_biases: array-like, optional, default=(0, +0.5, -0.5)
        List of temperature biases (float, in degC) for the mass balance model.
    suffixes: array-like, optional, default=('_bias_zero', '_bias_p', '_bias_n')
        Descriptive suffixes corresponding to the given temperature biases.
    tstar: float, optional, default=None
        'Equilibrium year' used for the mass balance calibration.
    vas_c_length_m: float, optional, default=None
        Scaling constant for volume/length scaling
    vas_c_area_m2: float, optional, default=None
        Scaling constant for volume/area scaling
    kwargs:
        Additional key word arguments for the `run_random_climate` or
        `run_constant_climate` routines of the vascaling module.

    Returns
    -------
    Dataset containing yearly values of all glacier geometries.

    """
    # assert correct output file suffixes for temp biases
    if len(temp_biases) != len(suffixes):
        raise RuntimeError("Each given temperature bias must have its "
                           "corresponding suffix")

    # OGGM preprocessing
    # ------------------

    # compute RGI region and version from RGI IDs
    # assuming they are all the same
    rgi_region = (rgi_ids[0].split('-')[-1]).split('.')[0]
    rgi_version = (rgi_ids[0].split('-')[0])[-2:]

    # load default parameter file
    cfg.initialize()

    # create working directory
    wdir = '/Users/oberrauch/work/master/working_directories/'
    wdir += 'test_cluster'
    if not os.path.exists(wdir):
        os.makedirs(wdir)
    # shutil.rmtree(wdir)
    # os.makedirs(wdir)
    # set path to working directory
    cfg.PATHS['working_dir'] = wdir
    # set RGI version and region
    cfg.PARAMS['rgi_version'] = rgi_version
    # define how many grid points to use around the glacier,
    # if you expect the glacier to grow large use a larger border
    cfg.PARAMS['border'] = 120
    # we use HistAlp climate data
    cfg.PARAMS['baseline_climate'] = 'HISTALP'
    # set the mb hyper parameters accordingly
    cfg.PARAMS['prcp_scaling_factor'] = 1.75
    cfg.PARAMS['temp_melt'] = -1.75
    # change scaling constants for length and area
    if vas_c_length_m:
        cfg.PARAMS['vas_c_length_m'] = vas_c_length_m
    if vas_c_area_m2:
        cfg.PARAMS['vas_c_area_m2'] = vas_c_area_m2
    # the bias is defined to be zero during the calibration process,
    # which is why we don't use it here to reproduce the results
    cfg.PARAMS['use_bias_for_run'] = use_bias_for_run

    # read RGI entry for the glaciers as DataFrame
    # containing the outline area as shapefile
    rgidf = utils.get_rgi_glacier_entities(rgi_ids)

    # get and set path to intersect shapefile
    intersects_db = utils.get_rgi_intersects_region_file(region=rgi_region)
    cfg.set_intersects_db(intersects_db)

    # sort by area for more efficient parallel computing
    rgidf = rgidf.sort_values('Area', ascending=False)
    cfg.PARAMS['use_multiprocessing'] = True
    # operational run, all glaciers should run
    cfg.PARAMS['continue_on_error'] = True

    # initialize the GlacierDirectory
    gdirs = workflow.init_glacier_directories(rgidf, reset=False, force=True)

    # define the local grid and glacier mask
    workflow.execute_entity_task(gis.define_glacier_region, gdirs)
    workflow.execute_entity_task(gis.glacier_masks, gdirs)
    # process the given climate file
    workflow.execute_entity_task(climate.process_climate_data, gdirs)
    # compute local t* and the corresponding mu*
    workflow.execute_entity_task(vascaling.local_t_star,
                                 gdirs,
                                 tstar=tstar,
                                 bias=0)

    # Run model with constant/random mass balance model
    # -------------------------------------------------

    # use t* as center year, even if specified differently
    kwargs['y0'] = tstar
    # run for 3000 years if not specified otherwise
    kwargs.setdefault('nyears', 3000)

    if use_random_mb:
        # set random seed to get reproducible results
        kwargs.setdefault('seed', 12)

        # run RandomMassBalance model centered around t*, once without
        # temperature bias and once with positive and negative temperature bias
        # of 0.5 °C each (by default).
        for suffix, temp_bias in zip(suffixes, temp_biases):
            workflow.execute_entity_task(vascaling.run_random_climate,
                                         gdirs,
                                         temperature_bias=temp_bias,
                                         output_filesuffix=suffix,
                                         **kwargs)
    else:
        # run ConstantMassBalance model centered around t*, once without
        # temperature bias and once with positive and negative temperature bias
        # of 0.5 °C each (by default).
        for suffix, temp_bias in zip(suffixes, temp_biases):
            workflow.execute_entity_task(vascaling.run_constant_climate,
                                         gdirs,
                                         temperature_bias=temp_bias,
                                         output_filesuffix=suffix,
                                         **kwargs)

    # Process output dataset(s)
    # -------------------------

    # create empty container
    ds = list()
    # iterate over all temperature biases/suffixes
    for suffix in suffixes:
        # compile the output for each run
        ds_ = utils.compile_run_output(np.atleast_1d(gdirs),
                                       input_filesuffix=suffix,
                                       path=False)
        # add to container
        ds.append(ds_)

    # concat the single output datasets into one,
    # with temperature bias as coordinate
    ds = xr.concat(ds, pd.Index(temp_biases, name='temp_bias'))
    # add model type as coordinate
    ds.coords['model'] = 'vas'
    # add mb model type as coordinate
    ds.coords['mb_model'] = 'random' if use_random_mb else 'constant'

    # compute mean and sum over all glaciers
    ds_mean = ds.mean(dim='rgi_id')
    ds_mean.coords['rgi_id'] = 'mean'
    ds_sum = ds.sum(dim='rgi_id')
    ds_sum.coords['rgi_id'] = 'sum'
    # add to dataset
    ds = xr.concat([ds, ds_mean, ds_sum], dim='rgi_id')

    # normalize glacier geometries (length/area/volume) with start value
    ds_normal = normalize_ds_with_start(ds)
    # add coordinate to distinguish between normalized and absolute values
    ds.coords['normalized'] = int(False)
    ds_normal.coords['normalized'] = int(True)

    # combine datasets
    ds = xr.concat([ds, ds_normal], 'normalized')

    # store datasets
    if path:
        if path is True:
            mb = 'random' if use_random_mb else 'constant'
            path = os.path.join(cfg.PATHS['working_dir'],
                                'run_output_{}_vas.nc'.format(mb))

        ds.to_netcdf(path)

    return ds
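
A minimal usage sketch for the routine above. The `def` line sits above this excerpt, so the function name `equilibrium_run_vas` is an assumption for illustration, as is the RGI ID (taken from another example in this file):

import matplotlib.pyplot as plt

# hypothetical call; name and arguments chosen for illustration only
ds = equilibrium_run_vas(['RGI60-11.00897'],
                         use_random_mb=True,
                         path=False,
                         temp_biases=(0, +0.5, -0.5),
                         suffixes=['_bias_zero', '_bias_p', '_bias_n'])
# plot the normalized mean length evolution for all temperature biases
# (assuming the usual 'time'/'length' entries of OGGM's compiled run output)
ds.length.sel(normalized=1, rgi_id='mean').plot.line(x='time', hue='temp_bias')
plt.show()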
Exemplo n.º 32
0
import geopandas as gpd
import oggm
from oggm import cfg, tasks
from oggm.utils import get_demo_file

# Set up the input data for this example
cfg.initialize()
cfg.PATHS['working_dir'] = oggm.utils.get_temp_dir('oggmcontrib_inv')
cfg.PATHS['dem_file'] = get_demo_file('srtm_oetztal.tif')
cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))

# Glacier directory for Hintereisferner in Austria
entity = gpd.read_file(get_demo_file('Hintereisferner_RGI5.shp')).iloc[0]
gdir = oggm.GlacierDirectory(entity)

# The usual OGGM preprocessing
tasks.define_glacier_region(gdir, entity=entity)
tasks.glacier_masks(gdir)
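
For a quick visual check of the preprocessing result, the glacier mask written by `glacier_masks` can be read back from the directory's gridded data file (a sketch, assuming the standard OGGM file layout):

import xarray as xr

# open the gridded data produced above and plot the glacier mask
with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
    ds['glacier_mask'].plot()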
Exemplo n.º 33
0
def climate_run_fl(rgi_ids,
                   path=True,
                   temp_biases=[0, +0.5, -0.5],
                   use_bias_for_run=False,
                   suffixes=['_bias_zero', '_bias_p', '_bias_n'],
                   tstar=None,
                   nyears=None,
                   **kwargs):
    """Computes 'only' the massbalance in analogy to the `equilibrium_run_...`
    routines, without running the evolution (flowline) model.

    Dataset containing yearly values of specific mass balance is returned.

    Parameters
    ----------
    rgi_ids: array-like
        List of RGI IDs for which the equilibrium experiments are performed.
    path: bool or str, optional, default=True
        If a path is given (or True), the resulting dataset is stored to file.
    temp_biases: array-like, optional, default=(0, +0.5, -0.5)
        List of temperature biases (float, in degC) for the mass balance model.
    suffixes: array-like, optional, default=['_bias_zero', '_bias_p', '_bias_n']
        Descriptive suffixes corresponding to the given temperature biases.
    tstar: float, optional, default=None
        'Equilibrium year' used for the mass balance calibration.
    nyears: int, optional, default=None
        Number of years for which to compute the random mass balance.
    kwargs:
        Additional keyword arguments for the mass balance model.

    Returns
    -------
    Dataset containing yearly values of specific mass balance.
    """

    # assert correct output file suffixes for temp biases
    if len(temp_biases) != len(suffixes):
        raise RuntimeError("Each given temperature bias must have its "
                           "corresponding suffix")

    # compute RGI region and version from RGI IDs
    # assuming they are all the same
    rgi_region = (rgi_ids[0].split('-')[-1]).split('.')[0]
    rgi_version = (rgi_ids[0].split('-')[0])[-2:]

    # load default parameter file
    cfg.initialize()

    # create working directory
    wdir = '/Users/oberrauch/work/master/working_directories/'
    wdir += 'test_cluster'
    if not os.path.exists(wdir):
        os.makedirs(wdir)
    # set path to working directory
    cfg.PATHS['working_dir'] = wdir
    # set RGI version
    cfg.PARAMS['rgi_version'] = rgi_version
    # define how many grid points to use around the glacier,
    # if you expect the glacier to grow large use a larger border
    cfg.PARAMS['border'] = 120
    # we use HistAlp climate data
    cfg.PARAMS['baseline_climate'] = 'HISTALP'
    # set the mass balance hyperparameters accordingly
    cfg.PARAMS['prcp_scaling_factor'] = 1.75
    cfg.PARAMS['temp_melt'] = -1.75
    # the bias is defined to be zero during the calibration process,
    # which is why we don't use it here to reproduce the results
    cfg.PARAMS['use_bias_for_run'] = use_bias_for_run

    # operational run, all glaciers should run
    cfg.PARAMS['continue_on_error'] = True

    # read the RGI entries for the given glaciers as a (Geo)DataFrame
    # containing the glacier outlines
    rgidf = utils.get_rgi_glacier_entities(rgi_ids)

    # get and set path to intersect shapefile
    intersects_db = utils.get_rgi_intersects_region_file(region=rgi_region)
    cfg.set_intersects_db(intersects_db)

    # initialize the GlacierDirectory
    gdirs = workflow.init_glacier_directories(rgidf, reset=False, force=True)

    # run gis tasks
    workflow.gis_prepro_tasks(gdirs)
    # run climate tasks
    workflow.execute_entity_task(climate.process_climate_data, gdirs)
    workflow.execute_entity_task(climate.local_t_star,
                                 gdirs,
                                 tstar=tstar,
                                 bias=0)
    workflow.execute_entity_task(climate.mu_star_calibration, gdirs)
    # run inversion tasks
    workflow.inversion_tasks(gdirs)
    # finalize preprocessing
    workflow.execute_entity_task(flowline.init_present_time_glacier, gdirs)

    # use t* as center year, even if specified differently
    kwargs['y0'] = tstar
    # run for 10,000 years if not specified otherwise
    if nyears is None:
        nyears = 10000
    years = np.arange(0, nyears + 1)

    # create dataset
    ds = list()

    # run RandomMassBalance model centered around t*, once without
    # temperature bias and once with positive and negative temperature bias
    # of 0.5 °C each.
    for gdir in gdirs:
        # set random seed to get reproducible results
        kwargs.setdefault('seed', 12)
        kwargs.setdefault('halfsize', 15)
        kwargs.setdefault('mb_model_class', flowline.RandomMassBalance)
        kwargs.setdefault('filename', 'climate_historical')
        kwargs.setdefault('input_filesuffix', '')
        kwargs.setdefault('unique_samples', False)

        ds_ = list()

        fls = gdir.read_pickle('model_flowlines')

        for suffix, temp_bias in zip(suffixes, temp_biases):
            # instance mass balance model
            mb_mod = flowline.MultipleFlowlineMassBalance(gdir, **kwargs)

            if temp_bias is not None:
                # add given temperature bias to mass balance model
                mb_mod.temp_bias = temp_bias

            # create empty container
            spec_mb = list()
            # iterate over all years
            for yr in years:
                spec_mb.append(mb_mod.get_specific_mb(fls=fls, year=yr))

            # add to dataset
            da = xr.DataArray(spec_mb, dims=('year'), coords={'year': years})
            ds_.append(xr.Dataset({'spec_mb': da}))

        ds_ = xr.concat(ds_, pd.Index(temp_biases, name='temp_bias'))
        ds_.coords['rgi_id'] = gdir.rgi_id
        ds.append(ds_)

    ds = xr.concat(ds, 'rgi_id')

    # store datasets
    if path:
        if path is True:
            path = os.path.join(cfg.PATHS['working_dir'], 'mb_output_fl.nc')
        ds.to_netcdf(path)

    return ds
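
A possible call of `climate_run_fl`, using the Hintereisferner RGI ID that appears further down in this file (a sketch; all other arguments keep their defaults):

# compute 1000 years of specific mass balance without writing to disk
ds = climate_run_fl(['RGI60-11.00897'], path=False, nyears=1000)
# plot the zero-bias series for the single glacier
ds.spec_mb.sel(rgi_id='RGI60-11.00897', temp_bias=0).plot()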
Exemplo n.º 34
0
cfg.PATHS['working_dir'] = WORKING_DIR
# Use multiprocessing
cfg.PARAMS['use_multiprocessing'] = True
# A border of 80 would be needed to use the Columbia itmix DEM
cfg.PARAMS['border'] = 20
# Set to True for operational runs
cfg.PARAMS['continue_on_error'] = True
# For tidewater glaciers we set this to False
cfg.PARAMS['correct_for_neg_flux'] = False
cfg.PARAMS['filter_for_neg_flux'] = False
# We need this set to True for the calving loop
cfg.PARAMS['allow_negative_mustar'] = True

# We use intersects
path = utils.get_rgi_intersects_region_file(rgi_region, version=rgi_version)
cfg.set_intersects_db(path)

# RGI file: 4 marine-terminating glaciers were merged with their respective
# branches, which is why we use our own version of RGI v6
rgidf = gpd.read_file(RGI_FILE)

# Pre-download other files which will be needed later
_ = utils.get_cru_file(var='tmp')
p = utils.get_cru_file(var='pre')
print('CRU file: ' + p)

# Run-control flags (some of these may not be necessary)
RUN_GIS_mask = True
RUN_GIS_PREPRO = True  # run GIS pre-processing tasks (before climate)
RUN_CLIMATE_PREPRO = True  # run climate pre-processing tasks
RUN_INVERSION = True  # run bed inversion
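
The remainder of this script is not part of the excerpt; below is a sketch of how such flags typically gate the workflow (assuming the usual `from oggm import workflow, tasks` imports of the full script):

gdirs = workflow.init_glacier_directories(rgidf)
if RUN_GIS_PREPRO:
    # includes glacier region, masks, centerlines, widths, ...
    workflow.gis_prepro_tasks(gdirs)
elif RUN_GIS_mask:
    # masks only (requires the glacier region to be defined already)
    workflow.execute_entity_task(tasks.glacier_masks, gdirs)
if RUN_CLIMATE_PREPRO:
    workflow.climate_tasks(gdirs)
if RUN_INVERSION:
    workflow.inversion_tasks(gdirs)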
Exemplo n.º 35
0
def run_benchmark(rgi_version=None,
                  rgi_reg=None,
                  border=None,
                  output_folder='',
                  working_dir='',
                  is_test=False,
                  test_rgidf=None,
                  test_intersects_file=None,
                  test_topofile=None):
    """Does the actual job.

    Parameters
    ----------
    rgi_version : str
        the RGI version to use (defaults to cfg.PARAMS)
    rgi_reg : str
        the RGI region to process
    border : int
        the number of pixels at the maps border
    output_folder : str
        path to the output folder (where to put the preprocessed tar files)
    working_dir : str
        path to the OGGM working directory
    is_test : bool
        to test on a couple of glaciers only!
    test_rgidf : shapefile
        for testing purposes only
    test_intersects_file : shapefile
        for testing purposes only
    test_topofile : str
        for testing purposes only
    """

    # TODO: temporarily silence Fiona deprecation warnings
    import warnings
    warnings.filterwarnings("ignore", category=DeprecationWarning)

    # Module logger
    log = logging.getLogger(__name__)

    # Initialize OGGM and set up the run parameters
    cfg.initialize(logging_level='WORKFLOW')

    # Local paths
    utils.mkdir(working_dir)
    cfg.PATHS['working_dir'] = working_dir

    # Use multiprocessing?
    cfg.PARAMS['use_multiprocessing'] = True

    # How many grid points around the glacier?
    # Make it large if you expect your glaciers to grow large
    cfg.PARAMS['border'] = border

    # Set to True for operational runs
    cfg.PARAMS['continue_on_error'] = True

    # For statistics
    odf = pd.DataFrame()

    if rgi_version is None:
        rgi_version = cfg.PARAMS['rgi_version']
    base_dir = os.path.join(output_folder)

    # Add a package version file
    utils.mkdir(base_dir)
    opath = os.path.join(base_dir, 'package_versions.txt')
    with open(opath, 'w') as vfile:
        vfile.write(utils.show_versions(logger=log))

    # Read RGI
    start = time.time()
    if test_rgidf is None:
        # Get the RGI file
        rgidf = gpd.read_file(
            utils.get_rgi_region_file(rgi_reg, version=rgi_version))
        # We use intersects
        rgif = utils.get_rgi_intersects_region_file(rgi_reg,
                                                    version=rgi_version)
        cfg.set_intersects_db(rgif)
    else:
        rgidf = test_rgidf
        cfg.set_intersects_db(test_intersects_file)

    if is_test:
        # Just for fun
        rgidf = rgidf.sample(2)
    _add_time_to_df(odf, 'Read RGI', time.time() - start)

    # Sort for more efficient parallel computing
    rgidf = rgidf.sort_values('Area', ascending=False)

    log.workflow('Starting prepro run for RGI reg: {} '
                 'and border: {}'.format(rgi_reg, border))
    log.workflow('Number of glaciers: {}'.format(len(rgidf)))

    # Input
    if test_topofile:
        cfg.PATHS['dem_file'] = test_topofile

    # Initialize working directories
    start = time.time()
    gdirs = workflow.init_glacier_directories(rgidf, reset=True, force=True)
    _add_time_to_df(odf, 'init_glacier_directories', time.time() - start)

    # Tasks
    task_list = [
        tasks.define_glacier_region,
        tasks.process_cru_data,
        tasks.glacier_masks,
        tasks.compute_centerlines,
        tasks.initialize_flowlines,
        tasks.compute_downstream_line,
        tasks.compute_downstream_bedshape,
        tasks.catchment_area,
        tasks.catchment_intersections,
        tasks.catchment_width_geom,
        tasks.catchment_width_correction,
        tasks.local_t_star,
        tasks.mu_star_calibration,
        tasks.prepare_for_inversion,
        tasks.mass_conservation_inversion,
        tasks.filter_inversion_output,
        tasks.init_present_time_glacier,
    ]
    for task in task_list:
        start = time.time()
        workflow.execute_entity_task(task, gdirs)
        _add_time_to_df(odf, task.__name__, time.time() - start)

    # Runs
    start = time.time()
    workflow.execute_entity_task(tasks.run_random_climate,
                                 gdirs,
                                 nyears=250,
                                 bias=0,
                                 seed=0,
                                 output_filesuffix='_tstar')
    _add_time_to_df(odf, 'run_random_climate_tstar_250', time.time() - start)

    start = time.time()
    workflow.execute_entity_task(tasks.run_random_climate,
                                 gdirs,
                                 nyears=250,
                                 y0=1995,
                                 seed=0,
                                 output_filesuffix='_commit')
    _add_time_to_df(odf, 'run_random_climate_commit_250', time.time() - start)

    # Compile results
    start = time.time()
    utils.compile_glacier_statistics(gdirs)
    _add_time_to_df(odf, 'compile_glacier_statistics', time.time() - start)

    start = time.time()
    utils.compile_climate_statistics(gdirs,
                                     add_climate_period=[1920, 1960, 2000])
    _add_time_to_df(odf, 'compile_climate_statistics', time.time() - start)

    start = time.time()
    utils.compile_run_output(gdirs, input_filesuffix='_tstar')
    _add_time_to_df(odf, 'compile_run_output_tstar', time.time() - start)

    start = time.time()
    utils.compile_run_output(gdirs, input_filesuffix='_commit')
    _add_time_to_df(odf, 'compile_run_output_commit', time.time() - start)

    # Log
    opath = os.path.join(base_dir, 'benchmarks_b{:03d}.csv'.format(border))
    odf.index.name = 'Task'
    odf.to_csv(opath)
    log.workflow('OGGM benchmarks are done!')
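
A possible invocation of `run_benchmark` (a sketch; the region, border, and folder paths are placeholder values):

# benchmark two randomly sampled glaciers of RGI region 11
run_benchmark(rgi_version='61', rgi_reg='11', border=80,
              output_folder='/tmp/bench_output',
              working_dir='/tmp/bench_wdir',
              is_test=True)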
Exemplo n.º 36
0
def init_glacier_directories(rgidf=None, *, reset=False, force=False,
                             from_prepro_level=None, prepro_border=None,
                             prepro_rgi_version=None, prepro_base_url=None,
                             from_tar=False, delete_tar=False,
                             use_demo_glaciers=None):
    """Initializes the list of Glacier Directories for this run.

    This is the very first task to do (always). If the directories are already
    available in the working directory, use them. If not, create new ones.

    Parameters
    ----------
    rgidf : GeoDataFrame or list of ids, optional for pre-computed runs
        the RGI glacier outlines. If unavailable, OGGM will parse the
        information from the glacier directories found in the working
        directory. It is required for new runs.
    reset : bool
        delete the existing glacier directories if found.
    force : bool
        setting `reset=True` will trigger a yes/no question to the user. Set
        `force=True` to avoid this.
    from_prepro_level : int
        get the gdir data from the official pre-processed pool. See the
        documentation for more information
    prepro_border : int
        for `from_prepro_level` only: if you want to override the default
        behavior which is to use `cfg.PARAMS['border']`
    prepro_rgi_version : str
        for `from_prepro_level` only: if you want to override the default
        behavior which is to use `cfg.PARAMS['rgi_version']`
    prepro_base_url : str
        for `from_prepro_level` only: if you want to override the default
        URL from which to download the gdirs. Default currently is
        https://cluster.klima.uni-bremen.de/~fmaussion/gdirs/oggm_v1.1/
    use_demo_glaciers : bool
        whether to check the demo glaciers for download (faster than the
        standard prepro downloads). The default is to decide whether or
        not to check based on simple criteria such as glacier list size.
    from_tar : bool, default=False
        extract the gdir data from a tar file. If set to `True`,
        will check for a tar file at the expected location in `base_dir`.
    delete_tar : bool, default=False
        delete the original tar file after extraction.

    Returns
    -------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the initialised glacier directories

    Notes
    -----
    This task is very similar to init_glacier_regions, with one main
    difference: it does not process the DEMs for the glaciers.
    Eventually, init_glacier_regions will be deprecated and removed from the
    codebase.
    """

    _check_duplicates(rgidf)

    if reset and not force:
        reset = utils.query_yes_no('Delete all glacier directories?')

    if from_prepro_level:
        url = utils.get_prepro_base_url(base_url=prepro_base_url,
                                        border=prepro_border,
                                        prepro_level=from_prepro_level,
                                        rgi_version=prepro_rgi_version)
        if cfg.PARAMS['has_internet'] and not utils.url_exists(url):
            raise InvalidParamsError("base url seems unreachable with these "
                                     "parameters: {}".format(url))

    # if reset, also delete the log directory
    if reset:
        fpath = os.path.join(cfg.PATHS['working_dir'], 'log')
        if os.path.exists(fpath):
            rmtree(fpath)

    if rgidf is None:
        # Infer the glacier directories from folders available in working dir
        if reset:
            raise ValueError('Cannot use reset without setting rgidf')
        log.workflow('init_glacier_directories by parsing all available '
                     'folders (this takes time: if possible, provide rgidf '
                     'instead).')
        # The dirs should be there already
        gl_dir = os.path.join(cfg.PATHS['working_dir'], 'per_glacier')
        gdirs = []
        for root, _, files in os.walk(gl_dir):
            if files and ('outlines.shp' in files or
                          'outlines.tar.gz' in files):
                gdirs.append(oggm.GlacierDirectory(os.path.basename(root)))
    else:
        # Create glacier directories from input
        # Check if dataframe or list of str
        try:
            entities = []
            for _, entity in rgidf.iterrows():
                entities.append(entity)
        except AttributeError:
            entities = utils.tolist(rgidf)

        # Check demo
        if use_demo_glaciers is None:
            use_demo_glaciers = len(entities) < 100

        if from_prepro_level is not None:
            log.workflow('init_glacier_directories from prepro level {} on '
                         '{} glaciers.'.format(from_prepro_level,
                                               len(entities)))
            # Read the hash dictionary before we use multiproc
            if cfg.PARAMS['dl_verify']:
                utils.get_dl_verify_data('cluster.klima.uni-bremen.de')
            gdirs = execute_entity_task(gdir_from_prepro, entities,
                                        from_prepro_level=from_prepro_level,
                                        prepro_border=prepro_border,
                                        prepro_rgi_version=prepro_rgi_version,
                                        check_demo_glacier=use_demo_glaciers,
                                        base_url=prepro_base_url)
        else:
            # We can set the intersects file automatically here
            if (cfg.PARAMS['use_intersects'] and
                    len(cfg.PARAMS['intersects_gdf']) == 0):
                try:
                    rgi_ids = np.unique(np.sort([entity.RGIId for entity in
                                                 entities]))
                    rgi_version = rgi_ids[0].split('-')[0][-2:]
                    fp = utils.get_rgi_intersects_entities(rgi_ids,
                                                           version=rgi_version)
                    cfg.set_intersects_db(fp)
                except AttributeError:
                    # List of str
                    pass

            gdirs = execute_entity_task(utils.GlacierDirectory, entities,
                                        reset=reset,
                                        from_tar=from_tar,
                                        delete_tar=delete_tar)

    return gdirs
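
Two typical call patterns for `init_glacier_directories` (a sketch; the prepro level, border, and RGI ID are illustrative values):

# (a) fetch pre-processed directories from the official pool
gdirs = init_glacier_directories(['RGI60-11.00897'],
                                 from_prepro_level=3,
                                 prepro_border=80)

# (b) start from RGI outlines already loaded as a GeoDataFrame
gdirs = init_glacier_directories(rgidf, reset=True, force=True)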
Exemplo n.º 37
0
import os

import oggm
from oggm import cfg, tasks
from gmd_analysis_scripts import PLOT_DIR
from oggm.utils import get_rgi_glacier_entities, get_rgi_intersects_entities, mkdir

fig_path = os.path.join(PLOT_DIR, 'hef_scenarios.pdf')

cfg.initialize()

cfg.PARAMS['border'] = 60

base_dir = os.path.join(os.path.expanduser('~/tmp'), 'OGGM_GMD', 'scenarios')
cfg.PATHS['working_dir'] = base_dir
mkdir(base_dir, reset=True)

entity = get_rgi_glacier_entities(['RGI60-11.00897']).iloc[0]
gdir = oggm.GlacierDirectory(entity, base_dir=base_dir)
cfg.set_intersects_db(get_rgi_intersects_entities(['RGI60-11.00897']))

tasks.define_glacier_region(gdir, entity=entity)
tasks.glacier_masks(gdir)
tasks.compute_centerlines(gdir)
tasks.initialize_flowlines(gdir)
tasks.compute_downstream_line(gdir)
tasks.compute_downstream_bedshape(gdir)
tasks.catchment_area(gdir)
tasks.catchment_intersections(gdir)
tasks.catchment_width_geom(gdir)
tasks.catchment_width_correction(gdir)
tasks.process_cru_data(gdir)
tasks.local_t_star(gdir)
tasks.mu_star_calibration(gdir)
tasks.prepare_for_inversion(gdir)
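
The excerpt stops at `prepare_for_inversion`; in the standard OGGM task chain (compare the task list in Exemplo n.º 35) the run would plausibly continue as below (a sketch, not part of the original example):

tasks.mass_conservation_inversion(gdir)
tasks.filter_inversion_output(gdir)
tasks.init_present_time_glacier(gdir)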