Пример #1
0
    def setUp(self):
        """Create a clean scratch directory and set OGGM defaults for the tests."""

        # test directory -- exist_ok=True replaces the racy
        # os.path.exists()/os.makedirs() check-then-create pair
        self.testdir = os.path.join(get_test_dir(), 'tmp')
        os.makedirs(self.testdir, exist_ok=True)
        self.clean_dir()

        # Init
        cfg.initialize()
        cfg.PARAMS['use_intersects'] = False
        cfg.PATHS['working_dir'] = self.testdir
        cfg.PATHS['dem_file'] = get_demo_file('dem_SouthGlacier.tif')
        # CRU climate files are expected next to the demo DEM file
        cfg.PATHS['cru_dir'] = os.path.dirname(cfg.PATHS['dem_file'])
        cfg.PARAMS['border'] = 10
Пример #2
0
    def setUp(self):
        """Create a clean scratch directory and set OGGM defaults for the tests."""

        # test directory -- exist_ok=True replaces the racy
        # os.path.exists()/os.makedirs() check-then-create pair
        self.testdir = os.path.join(get_test_dir(), 'tmp')
        os.makedirs(self.testdir, exist_ok=True)
        self.clean_dir()

        # demo RGI outline used by the tests
        self.rgi_file = get_demo_file('rgi_RGI50-01.10299.shp')

        # Init
        cfg.initialize()
        cfg.PARAMS['use_intersects'] = False
        cfg.PATHS['dem_file'] = get_demo_file('dem_RGI50-01.10299.tif')
        cfg.PARAMS['border'] = 40
Пример #3
0
class full_workflow:
    """Track end-to-end OGGM workflow results for three RGI glaciers.

    The ``setup_cache`` / ``track_*`` layout and the ``timeout`` attribute
    look like an airspeed-velocity (asv) benchmark suite -- each ``track_*``
    method returns one scalar metric (TODO confirm the benchmark runner).
    """

    # scratch directory shared by all benchmarks of this suite
    testdir = os.path.join(get_test_dir(), 'benchmarks', 'track_wf')

    def cfg_init(self):
        """(Re-)initialize the OGGM configuration used by every benchmark."""

        # Initialize OGGM and set up the default run parameters
        cfg.initialize()
        cfg.PATHS['working_dir'] = self.testdir
        cfg.PARAMS['use_multiprocessing'] = True
        cfg.PARAMS['border'] = 100
        cfg.PARAMS['continue_on_error'] = False

    def setup_cache(self):
        """Run the full preprocessing, climate, inversion and run pipeline.

        Returns the list of glacier directories; the ``track_*`` methods
        read back the compiled NetCDF files written to the working dir here.
        """

        # benchmark-runner hint: allow the expensive cache build 360 seconds
        setattr(full_workflow.setup_cache, "timeout", 360)

        utils.mkdir(self.testdir, reset=True)
        self.cfg_init()

        # Pre-download other files which will be needed later
        utils.get_cru_cl_file()
        utils.get_cru_file(var='tmp')
        utils.get_cru_file(var='pre')

        # Get the RGI glaciers for the run.
        rgi_list = ['RGI60-01.10299', 'RGI60-11.00897', 'RGI60-18.02342']
        rgidf = utils.get_rgi_glacier_entities(rgi_list)

        # We use intersects
        db = utils.get_rgi_intersects_entities(rgi_list, version='61')
        cfg.set_intersects_db(db)

        # Sort for more efficient parallel computing
        rgidf = rgidf.sort_values('Area', ascending=False)

        # Go - initialize glacier directories
        gdirs = workflow.init_glacier_regions(rgidf)

        # Preprocessing tasks (presumably order-dependent: each task
        # builds on the output of the previous one -- confirm)
        task_list = [
            tasks.glacier_masks,
            tasks.compute_centerlines,
            tasks.initialize_flowlines,
            tasks.compute_downstream_line,
            tasks.compute_downstream_bedshape,
            tasks.catchment_area,
            tasks.catchment_intersections,
            tasks.catchment_width_geom,
            tasks.catchment_width_correction,
        ]
        for task in task_list:
            execute_entity_task(task, gdirs)

        # Climate tasks -- only data IO and tstar interpolation!
        execute_entity_task(tasks.process_cru_data, gdirs)
        execute_entity_task(tasks.local_t_star, gdirs)
        execute_entity_task(tasks.mu_star_calibration, gdirs)

        # Inversion tasks
        execute_entity_task(tasks.prepare_for_inversion, gdirs)
        # We use the default parameters for this run
        execute_entity_task(tasks.mass_conservation_inversion, gdirs)
        execute_entity_task(tasks.filter_inversion_output, gdirs)

        # Final preparation for the run
        execute_entity_task(tasks.init_present_time_glacier, gdirs)

        # Random climate representative for the tstar climate, without bias
        # In an ideal world this would imply that the glaciers remain stable,
        # but it doesn't have to be so
        execute_entity_task(tasks.run_constant_climate,
                            gdirs,
                            bias=0,
                            nyears=100,
                            output_filesuffix='_tstar')

        execute_entity_task(tasks.run_constant_climate,
                            gdirs,
                            y0=1990,
                            nyears=100,
                            output_filesuffix='_pd')

        # Compile output
        utils.compile_glacier_statistics(gdirs)
        utils.compile_run_output(gdirs, filesuffix='_tstar')
        utils.compile_run_output(gdirs, filesuffix='_pd')
        utils.compile_climate_input(gdirs)

        return gdirs

    def track_start_volume(self, gdirs):
        """Total volume at the first time step of the t* run, scaled by
        1e-9 (presumably m3 -> km3; confirm units in the output file)."""
        self.cfg_init()
        path = os.path.join(cfg.PATHS['working_dir'], 'run_output_tstar.nc')
        ds = xr.open_dataset(path)
        return float(ds.volume.sum(dim='rgi_id').isel(time=0)) * 1e-9

    def track_tstar_run_final_volume(self, gdirs):
        """Total volume at the last time step of the t* run (same scaling)."""
        self.cfg_init()
        path = os.path.join(cfg.PATHS['working_dir'], 'run_output_tstar.nc')
        ds = xr.open_dataset(path)
        return float(ds.volume.sum(dim='rgi_id').isel(time=-1)) * 1e-9

    def track_1990_run_final_volume(self, gdirs):
        """Total volume at the last time step of the y0=1990 run."""
        self.cfg_init()
        path = os.path.join(cfg.PATHS['working_dir'], 'run_output_pd.nc')
        ds = xr.open_dataset(path)
        return float(ds.volume.sum(dim='rgi_id').isel(time=-1)) * 1e-9

    def track_avg_temp_full_period(self, gdirs):
        """Mean of the compiled climate-input temperature over everything."""
        self.cfg_init()
        path = os.path.join(cfg.PATHS['working_dir'], 'climate_input.nc')
        ds = xr.open_dataset(path)
        return float(ds.temp.mean())

    def track_avg_prcp_full_period(self, gdirs):
        """Mean of the compiled climate-input precipitation over everything."""
        self.cfg_init()
        path = os.path.join(cfg.PATHS['working_dir'], 'climate_input.nc')
        ds = xr.open_dataset(path)
        return float(ds.prcp.mean())
Пример #4
0
class hef_prepro:
    """Track Hintereisferner preprocessing, calibration and inversion metrics.

    ``setup_cache`` builds a single glacier directory once; each ``track_*``
    method returns one scalar -- presumably an airspeed-velocity benchmark
    class (TODO confirm the runner).
    """

    # scratch directory for this suite
    testdir = os.path.join(get_test_dir(), 'benchmarks', 'track_hef')

    def cfg_init(self):
        """(Re-)initialize OGGM with the Hintereisferner demo input files."""

        # Init
        cfg.initialize(logging_level='ERROR')
        cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
        cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
        cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
        cfg.PARAMS['baseline_climate'] = 'CUSTOM'

    def setup_cache(self):
        """Run preprocessing, mu* calibration and inversion for one glacier.

        Returns the glacier directory used by all ``track_*`` methods.
        """

        utils.mkdir(self.testdir, reset=True)
        self.cfg_init()

        hef_file = get_demo_file('Hintereisferner_RGI5.shp')
        entity = gpd.read_file(hef_file).iloc[0]

        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)

        # Preprocessing chain (presumably order-dependent -- confirm)
        tasks.define_glacier_region(gdir)
        tasks.glacier_masks(gdir)
        tasks.compute_centerlines(gdir)
        tasks.initialize_flowlines(gdir)
        tasks.compute_downstream_line(gdir)
        tasks.compute_downstream_bedshape(gdir)
        tasks.catchment_area(gdir)
        tasks.catchment_intersections(gdir)
        tasks.catchment_width_geom(gdir)
        tasks.catchment_width_correction(gdir)
        tasks.process_custom_climate_data(gdir)
        # glacier_mu_candidates emits a FutureWarning we deliberately ignore
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", category=FutureWarning)
            tasks.glacier_mu_candidates(gdir)
        # Calibrate t* / bias against the reference mass-balance data
        mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']
        res = climate.t_star_from_refmb(gdir, mbdf=mbdf)
        tasks.local_t_star(gdir, tstar=res['t_star'],
                           bias=res['bias'])
        tasks.mu_star_calibration(gdir)

        tasks.prepare_for_inversion(gdir)
        tasks.mass_conservation_inversion(gdir)

        return gdir

    def track_average_slope(self, gdir):
        """Mean slope angle over all inversion flowline sections."""
        self.cfg_init()
        cls = gdir.read_pickle('inversion_input')
        out = np.array([])
        for cl in cls:
            out = np.append(out, cl['slope_angle'])
        return np.mean(out)

    def track_average_width(self, gdir):
        """Mean section width over all inversion flowlines."""
        self.cfg_init()
        cls = gdir.read_pickle('inversion_input')
        out = np.array([])
        for cl in cls:
            out = np.append(out, cl['width'])
        return np.mean(out)

    def track_rectangular_ratio(self, gdir):
        """Fraction of sections flagged as rectangular."""
        self.cfg_init()
        cls = gdir.read_pickle('inversion_input')
        out = np.array([])
        for cl in cls:
            out = np.append(out, cl['is_rectangular'])
        return np.sum(out) / len(out)

    def track_mustar(self, gdir):
        """Glacier-wide mu*; asserts all flowlines share the same value."""
        self.cfg_init()
        df = gdir.read_json('local_mustar')
        assert df['mu_star_allsame']
        return df['mu_star_glacierwide']

    def track_bias(self, gdir):
        """Calibrated mass-balance bias."""
        self.cfg_init()
        df = gdir.read_json('local_mustar')
        return df['bias']

    def track_mb_1980_avg(self, gdir):
        """Mean specific mass balance over 1970-2000.

        NOTE(review): the name says 1980 but the window is 1970..2000
        (31 years, centered on 1985) -- confirm the intended period.
        """
        self.cfg_init()
        mb = massbalance.PastMassBalance(gdir)
        h, w = gdir.get_inversion_flowline_hw()
        mb_ts = mb.get_specific_mb(heights=h, widths=w,
                                   year=np.arange(31)+1970)
        return np.mean(mb_ts)

    def track_mb_1980_sigma(self, gdir):
        """Std of the specific mass balance over 1970-2000 (see note above
        the matching *_avg method about the window naming)."""
        self.cfg_init()
        mb = massbalance.PastMassBalance(gdir)
        h, w = gdir.get_inversion_flowline_hw()
        mb_ts = mb.get_specific_mb(heights=h, widths=w,
                                   year=np.arange(31)+1970)
        return np.std(mb_ts)

    def track_mb_1870_avg(self, gdir):
        """Mean specific mass balance over 1860-1890 (31 years)."""
        self.cfg_init()
        mb = massbalance.PastMassBalance(gdir)
        h, w = gdir.get_inversion_flowline_hw()
        mb_ts = mb.get_specific_mb(heights=h, widths=w,
                                   year=np.arange(31)+1860)
        return np.mean(mb_ts)

    def track_mb_1870_sigma(self, gdir):
        """Std of the specific mass balance over 1860-1890."""
        self.cfg_init()
        mb = massbalance.PastMassBalance(gdir)
        h, w = gdir.get_inversion_flowline_hw()
        mb_ts = mb.get_specific_mb(heights=h, widths=w,
                                   year=np.arange(31)+1860)
        return np.std(mb_ts)

    def track_inversion_volume(self, gdir):
        """Total inverted ice volume, scaled by 1e-9 (presumably m3 -> km3)."""
        self.cfg_init()
        inv_cls = gdir.read_pickle('inversion_output')
        vol = 0
        for cl in inv_cls:
            vol += np.sum(cl['volume'])
        return vol * 1e-9
Пример #5
0
from oggm import workflow
from oggm.utils import get_demo_file, rmsd, write_centerlines_to_shape
from oggm.tests import is_slow, RUN_WORKFLOW_TESTS
from oggm.tests import is_graphic_test, BASELINE_DIR
from oggm.tests.funcs import (get_test_dir, use_multiprocessing,
                              patch_url_retrieve_github)
from oggm.core import flowline, massbalance
from oggm import tasks
from oggm import utils

# do we even want to run the tests?
if not RUN_WORKFLOW_TESTS:
    raise unittest.SkipTest('Skipping all workflow tests.')

# Globals
TEST_DIR = os.path.join(get_test_dir(), 'tmp_workflow')
CLI_LOGF = os.path.join(TEST_DIR, 'clilog.pkl')

# Placeholder only: setup_module stores the original downloader on the module
_url_retrieve = None


def setup_module(module):
    """Remember the real url-retrieve function and install the GitHub patch.

    The tuple assignment evaluates the right-hand side first, so the
    original function is captured before it is replaced.
    """
    module._url_retrieve, utils._urlretrieve = (utils._urlretrieve,
                                                patch_url_retrieve_github)


def teardown_module(module):
    """Restore the original url-retrieve function saved by setup_module."""
    utils._urlretrieve = module._url_retrieve


def clean_dir(testdir):
Пример #6
0
import os
import shutil
import numpy as np
from oggm.tests.funcs import init_hef, get_test_dir
from oggm import utils, tasks
from oggm.core import massbalance, flowline

# Benchmark fixtures: a fresh scratch directory (reset=True wipes any
# previous contents) plus the altitudes and years evaluated by the timings.
testdir = os.path.join(get_test_dir(), 'benchmarks')
utils.mkdir(testdir, reset=True)
heights = np.linspace(2200, 3600, 120)  # 120 altitudes, 2200-3600 (presumably m a.s.l.)
years = np.arange(151) + 1850  # 1850..2000 inclusive


def teardown():
    """Remove the benchmark scratch directory, tolerating its absence."""
    if not os.path.exists(testdir):
        # Nothing to clean up
        return
    shutil.rmtree(testdir)


def setup():
    """Build the Hintereisferner glacier directory used by the timings.

    Mutates the module-level ``gdir`` global that the ``time_*``
    functions read.
    """
    global gdir
    gdir = init_hef(border=80, logging_level='ERROR')
    # Wipe any previous benchmark dir *before* copying the glacier
    # directory into a clean location below
    teardown()
    gdir = tasks.copy_to_basedir(gdir, base_dir=testdir, setup='all')
    flowline.init_present_time_glacier(gdir)


def time_hef_run_until():
    """Benchmark setting up a flux-based model run on Hintereisferner.

    NOTE(review): this looks truncated -- the function only builds the
    model; presumably a ``run_until`` call follows in the original source.
    Confirm upstream before relying on it.
    """

    # Random-climate mass balance, no bias, fixed seed -> deterministic
    mb_mod = massbalance.RandomMassBalance(gdir, bias=0, seed=0)
    fls = gdir.read_pickle('model_flowlines')
    model = flowline.FluxBasedModel(fls, mb_model=mb_mod, y0=0.)
Пример #7
0
 def setUp(self):
     """Initialize OGGM defaults and point at a fresh ``tmp_gir`` test dir."""
     cfg.initialize()
     self.testdir = os.path.join(get_test_dir(), 'tmp_gir')
     self.reset_dir()
Пример #8
0
from scipy.optimize import minimize_scalar

# import the needed OGGM modules
import oggm
from oggm import cfg
from oggm.utils import get_demo_file, get_rgi_glacier_entities
from oggm.tests.funcs import get_test_dir
from oggm.core import gis, climate, centerlines
from oggm.core import vascaling

# ---------------------
#  PREPROCESSING TASKS
# ---------------------

# Create a fresh (empty) temporary working directory.
# The original code called os.makedirs() only to rmtree() and re-create the
# directory immediately after -- remove-if-present then create is equivalent
# and does no wasted work.
testdir = os.path.join(get_test_dir(), 'tmp_comparison')
if os.path.exists(testdir):
    shutil.rmtree(testdir)
os.makedirs(testdir)

# load default parameter file
cfg.initialize()
cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
cfg.PATHS['working_dir'] = testdir
cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
cfg.PARAMS['border'] = 10
# cfg.PARAMS['run_mb_calibration'] = True
cfg.PARAMS['baseline_climate'] = 'HISTALP'
cfg.PARAMS['use_multiprocessing'] = True
# use HistAlp as climate file
Пример #9
0
# Locals
import oggm
import oggm.cfg as cfg
from oggm import workflow
from oggm.utils import get_demo_file, write_centerlines_to_shape
from oggm.tests import mpl_image_compare
from oggm.tests.funcs import (get_test_dir, use_multiprocessing,
                              patch_url_retrieve_github)
from oggm.core import flowline
from oggm import tasks
from oggm import utils

# Globals
pytestmark = pytest.mark.test_env("workflow")  # tag every test in this module
TEST_DIR = os.path.join(get_test_dir(), 'tmp_workflow')
CLI_LOGF = os.path.join(TEST_DIR, 'clilog.pkl')

# Placeholder only: setup_module stores the original downloader on the module
_url_retrieve = None


def setup_module(module):
    """Save the original downloader and install the GitHub test patch.

    NOTE(review): the original is read from ``utils`` but the patch is
    installed on ``oggm.utils._downloads`` -- presumably the same function
    object re-exported; confirm, otherwise teardown restores a stale ref.
    """
    module._url_retrieve = utils.oggm_urlretrieve
    oggm.utils._downloads.oggm_urlretrieve = patch_url_retrieve_github


def teardown_module(module):
    """Restore the downloader saved by setup_module."""
    oggm.utils._downloads.oggm_urlretrieve = module._url_retrieve


def clean_dir(testdir):
Пример #10
0
import os
import shutil
import numpy as np
from oggm.tests.funcs import init_hef, get_test_dir
from oggm import utils, tasks
from oggm.core import massbalance


# Benchmark fixtures: a fresh scratch directory (reset=True wipes any
# previous contents) plus the altitudes and years evaluated by the timings.
testdir = os.path.join(get_test_dir(), 'benchmarks')
utils.mkdir(testdir, reset=True)
heights = np.linspace(2200, 3600, 120)  # 120 altitudes, 2200-3600 (presumably m a.s.l.)
years = np.arange(151) + 1850  # 1850..2000 inclusive


def teardown():
    """Remove the benchmark scratch directory, tolerating its absence."""
    if not os.path.exists(testdir):
        # Nothing to clean up
        return
    shutil.rmtree(testdir)


def setup():
    """Build the Hintereisferner glacier directory used by the timings.

    Mutates the module-level ``gdir`` global that the ``time_*``
    functions read.
    """
    global gdir
    gdir = init_hef(border=80)
    # Wipe any previous benchmark dir *before* copying the glacier
    # directory into a clean location below
    teardown()
    gdir = tasks.copy_to_basedir(gdir, base_dir=testdir, setup='all')


def time_PastMassBalance():
    """Time evaluating the annual mass balance for every benchmark year."""
    model = massbalance.PastMassBalance(gdir, bias=0)
    for year in years:
        model.get_annual_mb(heights, year=year)