Example #1
    def test_leclercq_data(self):

        hef_file = utils.get_demo_file('Hintereisferner_RGI5.shp')
        entity = gpd.read_file(hef_file).iloc[0]
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)

        df = gdir.get_ref_length_data()
        assert df.name == 'Hintereis'
        assert len(df) == 105
Example #2
    def test_local_t_star(self):

        # set parameters for climate file and mass balance calibration
        cfg.PARAMS['baseline_climate'] = 'CUSTOM'
        cfg.PARAMS['baseline_y0'] = 1850
        cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
        cfg.PARAMS['run_mb_calibration'] = False

        # read the Hintereisferner
        hef_file = get_demo_file('Hintereisferner_RGI5.shp')
        entity = gpd.read_file(hef_file).iloc[0]

        # initialize the GlacierDirectory
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        # define the local grid and the glacier mask
        gis.define_glacier_region(gdir, entity=entity)
        gis.glacier_masks(gdir)
        # run centerline prepro tasks
        centerlines.compute_centerlines(gdir)
        centerlines.initialize_flowlines(gdir)
        centerlines.catchment_area(gdir)
        centerlines.catchment_intersections(gdir)
        centerlines.catchment_width_geom(gdir)
        centerlines.catchment_width_correction(gdir)
        # process the given climate file
        climate.process_custom_climate_data(gdir)

        # compute the reference t* for the glacier
        # given the reference mass balance measurements
        res = vascaling.t_star_from_refmb(gdir)
        t_star, bias = res['t_star'], res['bias']
        # compute local t* and the corresponding mu*
        vascaling.local_t_star(gdir, tstar=t_star, bias=bias)
        # read calibration results
        vas_mustar_refmb = gdir.read_json('vascaling_mustar')

        # get reference t* list
        ref_df = cfg.PARAMS['vas_ref_tstars_rgi5_histalp']
        # compute local t* and the corresponding mu*
        vascaling.local_t_star(gdir, ref_df=ref_df)
        # read calibration results
        vas_mustar_refdf = gdir.read_json('vascaling_mustar')

        # compute local t* and the corresponding mu*
        vascaling.local_t_star(gdir)
        # read calibration results
        vas_mustar = gdir.read_json('vascaling_mustar')

        # compare with each other
        assert vas_mustar_refdf == vas_mustar
        # TODO: this test is failing currently
        # np.testing.assert_allclose(vas_mustar_refmb['bias'],
        #                            vas_mustar_refdf['bias'], atol=1)
        vas_mustar_refdf.pop('bias')
        vas_mustar_refmb.pop('bias')
        # end of workaround
        assert vas_mustar_refdf == vas_mustar_refmb
Example #3
    def _set_up_VAS_model(self):
        """Avoiding a chunk of code duplicate. Set's up a running volume/area
        scaling model, including all needed prepo tasks.
        """

        # read the Hintereisferner DEM
        hef_file = get_demo_file('Hintereisferner_RGI5.shp')
        entity = gpd.read_file(hef_file).iloc[0]

        # initialize the GlacierDirectory
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        # define the local grid and glacier mask
        gis.define_glacier_region(gdir, entity=entity)
        gis.glacier_masks(gdir)

        # process the given climate file
        climate.process_custom_climate_data(gdir)

        # run center line preprocessing tasks
        centerlines.compute_centerlines(gdir)
        centerlines.initialize_flowlines(gdir)
        centerlines.catchment_area(gdir)
        centerlines.catchment_intersections(gdir)
        centerlines.catchment_width_geom(gdir)
        centerlines.catchment_width_correction(gdir)

        # read reference glacier mass balance data
        mbdf = gdir.get_ref_mb_data()
        # compute the reference t* for the glacier
        # given the reference mass balance measurements
        res = climate.t_star_from_refmb(gdir, mbdf=mbdf['ANNUAL_BALANCE'])
        t_star, bias = res['t_star'], res['bias']

        # --------------------
        #  MASS BALANCE TASKS
        # --------------------

        # compute local t* and the corresponding mu*
        vascaling.local_t_star(gdir, tstar=t_star, bias=bias)

        # instance the mass balance models
        mbmod = vascaling.VAScalingMassBalance(gdir)

        # ----------------
        #  DYNAMICAL PART
        # ----------------
        # get reference area
        a0 = gdir.rgi_area_m2
        # get reference year
        y0 = gdir.get_climate_info()['baseline_hydro_yr_0']
        # get min and max glacier surface elevation
        h0, h1 = vascaling.get_min_max_elevation(gdir)

        model = vascaling.VAScalingModel(year_0=y0, area_m2_0=a0,
                                         min_hgt=h0, max_hgt=h1,
                                         mb_model=mbmod)
        return gdir, model
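A hedged sketch of how a test might consume this helper, assuming the same test-class context; run_until and volume_m3 are used the same way in Example #7. The test name is hypothetical.

    def test_vas_model_runs(self):  # hypothetical test using the helper above
        gdir, model = self._set_up_VAS_model()
        # advance the volume/area scaling model by 50 years
        model.run_until(model.year + 50)
        assert model.volume_m3 > 0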
Example #4
    def test_set_width(self):
        entity = gpd.read_file(self.rgi_file).iloc[0]

        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        gis.define_glacier_region(gdir)
        gis.glacier_masks(gdir)
        centerlines.compute_centerlines(gdir)
        centerlines.initialize_flowlines(gdir)
        centerlines.compute_downstream_line(gdir)
        centerlines.compute_downstream_bedshape(gdir)
        centerlines.catchment_area(gdir)
        centerlines.catchment_intersections(gdir)
        centerlines.catchment_width_geom(gdir)
        centerlines.catchment_width_correction(gdir)

        # Test that the area and the area-altitude distribution are fine
        with utils.ncDataset(gdir.get_filepath('gridded_data')) as nc:
            mask = nc.variables['glacier_mask'][:]
            topo = nc.variables['topo_smoothed'][:]
        rhgt = topo[np.where(mask)][:]

        fls = gdir.read_pickle('inversion_flowlines')
        hgt, widths = gdir.get_inversion_flowline_hw()

        bs = 100
        bins = np.arange(utils.nicenumber(np.min(hgt), bs, lower=True),
                         utils.nicenumber(np.max(hgt), bs) + 1,
                         bs)
        h1, b = np.histogram(hgt, weights=widths, density=True, bins=bins)
        h2, b = np.histogram(rhgt, density=True, bins=bins)
        h1 = h1 / np.sum(h1)
        h2 = h2 / np.sum(h2)
        assert utils.rmsd(h1, h2) < 0.02  # less than 2% error
        new_area = np.sum(widths * fls[-1].dx * gdir.grid.dx)
        np.testing.assert_allclose(new_area, gdir.rgi_area_m2)

        centerlines.terminus_width_correction(gdir, new_width=714)

        fls = gdir.read_pickle('inversion_flowlines')
        hgt, widths = gdir.get_inversion_flowline_hw()

        # Check that the width is ok
        np.testing.assert_allclose(fls[-1].widths[-1] * gdir.grid.dx, 714)

        # Check for area distrib
        bins = np.arange(utils.nicenumber(np.min(hgt), bs, lower=True),
                         utils.nicenumber(np.max(hgt), bs) + 1,
                         bs)
        h1, b = np.histogram(hgt, weights=widths, density=True, bins=bins)
        h2, b = np.histogram(rhgt, density=True, bins=bins)
        h1 = h1 / np.sum(h1)
        h2 = h2 / np.sum(h2)
        assert utils.rmsd(h1, h2) < 0.02  # less than 2% error
        new_area = np.sum(widths * fls[-1].dx * gdir.grid.dx)
        np.testing.assert_allclose(new_area, gdir.rgi_area_m2)
Example #5
def gdir_from_tar(entity, from_tar):

    try:
        rgi_id = entity.RGIId
    except AttributeError:
        rgi_id = entity

    from_tar = os.path.join(from_tar, '{}'.format(rgi_id[:8]),
                            '{}.tar'.format(rgi_id[:11]))
    assert os.path.exists(from_tar), 'tarfile does not exist'
    from_tar = os.path.join(from_tar.replace('.tar', ''), rgi_id + '.tar.gz')
    return oggm.GlacierDirectory(entity, from_tar=from_tar)
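A minimal usage sketch for the helper above; the base directory is hypothetical and must already contain the region/sub-region tar layout the function expects.

# hypothetical directory holding '<region>/<sub-region>.tar' bundles
tar_dir = '/path/to/gdir_tars'
gdir = gdir_from_tar('RGI60-11.00897', from_tar=tar_dir)
print(gdir.rgi_id)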
Example #6
def init_glacier_regions(rgidf=None, reset=False, force=False):
    """Very first task to do (always).

    Set reset=True in order to delete the content of the directories.
    """

    if reset and not force:
        reset = utils.query_yes_no('Delete all glacier directories?')

    # if reset delete also the log directory
    if reset:
        fpath = os.path.join(cfg.PATHS['working_dir'], 'log')
        if os.path.exists(fpath):
            rmtree(fpath)

    gdirs = []
    new_gdirs = []
    if rgidf is None:
        if reset:
            raise ValueError('Cannot use reset without a rgi file')
        # The dirs should be there already
        gl_dir = os.path.join(cfg.PATHS['working_dir'], 'per_glacier')
        for root, _, files in os.walk(gl_dir):
            if files and ('dem.tif' in files):
                gdirs.append(oggm.GlacierDirectory(os.path.basename(root)))
    else:
        for _, entity in rgidf.iterrows():
            gdir = oggm.GlacierDirectory(entity, reset=reset)
            if not os.path.exists(gdir.get_filepath('dem')):
                new_gdirs.append((gdir, dict(entity=entity)))
            gdirs.append(gdir)

    # If not initialized, run the task in parallel
    execute_entity_task(tasks.define_glacier_region, new_gdirs)

    return gdirs
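A short usage sketch, assuming a hypothetical RGI subset shapefile and an already configured cfg.PATHS['working_dir']:

import geopandas as gpd

rgidf = gpd.read_file('/path/to/rgi_subset.shp')  # hypothetical RGI subset
gdirs = init_glacier_regions(rgidf, reset=False)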
Example #7
    def test_run_until_equilibrium(self):
        """"""
        # let's not use the mass balance bias since we want to reproduce
        # results from mass balance calibration
        cfg.PARAMS['use_bias_for_run'] = False

        # read the Hintereisferner DEM
        hef_file = get_demo_file('Hintereisferner_RGI5.shp')
        entity = gpd.read_file(hef_file).iloc[0]

        # initialize the GlacierDirectory
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        # define the local grid and glacier mask
        gis.define_glacier_region(gdir, entity=entity)
        gis.glacier_masks(gdir)

        # process the given climate file
        climate.process_custom_climate_data(gdir)
        # compute mass balance parameters
        ref_df = cfg.PARAMS['vas_ref_tstars_rgi5_histalp']
        vascaling.local_t_star(gdir, ref_df=ref_df)

        # instance a constant mass balance model, centred around t*
        mb_model = vascaling.ConstantVASMassBalance(gdir)
        # add a positive temperature bias
        mb_model.temp_bias = 0.5

        # create a VAS model: start with year 0 since we are using a constant
        # mass balance model; other values are read from the RGI
        min_hgt, max_hgt = vascaling.get_min_max_elevation(gdir)
        model = vascaling.VAScalingModel(year_0=0,
                                         area_m2_0=gdir.rgi_area_m2,
                                         min_hgt=min_hgt,
                                         max_hgt=max_hgt,
                                         mb_model=mb_model)

        # run glacier with new mass balance model
        model.run_until_equilibrium(rate=1e-4)

        # equilibrium should be reached after a couple of 100 years
        assert model.year <= 300
        # new equilibrium glacier should be smaller (positive temperature bias)
        assert model.volume_m3 < model.volume_m3_0

        # run glacier for another 100 years and check volume again
        v_eq = model.volume_m3
        model.run_until(model.year + 100)
        assert abs(1 - (model.volume_m3 / v_eq)) < 0.01
Example #8
def gdir_from_prepro(entity, from_prepro_level=None,
                     prepro_border=None, prepro_rgi_version=None,
                     base_url=None):

    if prepro_border is None:
        prepro_border = int(cfg.PARAMS['border'])
    if prepro_rgi_version is None:
        prepro_rgi_version = cfg.PARAMS['rgi_version']
    try:
        rid = entity.RGIId
    except AttributeError:
        rid = entity

    tar_base = utils.get_prepro_gdir(prepro_rgi_version, rid, prepro_border,
                                     from_prepro_level, base_url=base_url)
    from_tar = os.path.join(tar_base.replace('.tar', ''), rid + '.tar.gz')
    return oggm.GlacierDirectory(entity, from_tar=from_tar)
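A hedged sketch of a direct call to the helper above, relying on the cfg.PARAMS defaults for the border and RGI version; the RGI id is only an example.

# fetch a single glacier directory from the pre-processed pool
gdir = gdir_from_prepro('RGI60-11.00897', from_prepro_level=3)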
Example #9
    def _setup_mb_test(self):
        """Avoiding a chunk of code duplicate. Performs needed prepo tasks and
        returns the oggm.GlacierDirectory.
        """

        # read the Hintereisferner DEM
        hef_file = get_demo_file('Hintereisferner_RGI5.shp')
        entity = gpd.read_file(hef_file).iloc[0]
        # initialize the GlacierDirectory
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        # define the local grid and the glacier mask
        gis.define_glacier_region(gdir, entity=entity)
        gis.glacier_masks(gdir)

        # process the given climate file
        climate.process_custom_climate_data(gdir)

        # run centerline prepro tasks
        centerlines.compute_centerlines(gdir)
        centerlines.initialize_flowlines(gdir)
        centerlines.catchment_area(gdir)
        centerlines.catchment_intersections(gdir)
        centerlines.catchment_width_geom(gdir)
        centerlines.catchment_width_correction(gdir)

        # read reference glacier mass balance data
        mbdf = gdir.get_ref_mb_data()
        # compute the reference t* for the glacier
        # given the reference mass balance measurements
        res = vascaling.t_star_from_refmb(gdir, mbdf=mbdf['ANNUAL_BALANCE'])
        t_star, bias = res['t_star'], res['bias']

        # compute local t* and the corresponding mu*
        vascaling.local_t_star(gdir, tstar=t_star, bias=bias)

        # run OGGM mu* calibration
        climate.local_t_star(gdir, tstar=t_star, bias=bias)
        climate.mu_star_calibration(gdir)

        # pass the GlacierDirectory
        return gdir
Example #10
def init_glacier_regions(rgidf, reset=False, force=False):
    """Very first task to do (always).

    Set reset=True in order to delete the content of the directories.
    """

    if reset and not force:
        reset = utils.query_yes_no('Delete all glacier directories?')

    # if reset delete also the log directory
    if reset:
        fpath = os.path.join(cfg.PATHS['working_dir'], 'log')
        if os.path.exists(fpath):
            rmtree(fpath)

    gdirs = []
    for _, entity in rgidf.iterrows():
        gdir = oggm.GlacierDirectory(entity, reset=reset)
        if not os.path.exists(gdir.get_filepath('dem')):
            tasks.define_glacier_region(gdir, entity=entity)
        gdirs.append(gdir)

    return gdirs
Example #11
def init_columbia(reset=False):

    from oggm.core import gis, centerlines
    import geopandas as gpd

    # test directory
    testdir = os.path.join(get_test_dir(), 'tmp_columbia')
    if not os.path.exists(testdir):
        os.makedirs(testdir)
        reset = True

    # Init
    cfg.initialize()
    cfg.PATHS['working_dir'] = testdir
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['dem_file'] = get_demo_file('dem_Columbia.tif')
    cfg.PARAMS['border'] = 10
    cfg.PARAMS['use_kcalving_for_inversion'] = True
    cfg.PARAMS['use_kcalving_for_run'] = True

    entity = gpd.read_file(get_demo_file('01_rgi60_Columbia.shp')).iloc[0]
    gdir = oggm.GlacierDirectory(entity, reset=reset)
    if gdir.has_file('climate_historical'):
        return gdir

    gis.define_glacier_region(gdir)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    tasks.process_dummy_cru_file(gdir, seed=0)
    apply_test_ref_tstars()
    return gdir
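The helper above returns a ready-to-use GlacierDirectory; a minimal sketch of calling it:

gdir = init_columbia(reset=False)
print(gdir.rgi_id, gdir.rgi_area_m2)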
Example #12
def gdir_from_prepro(entity, from_prepro_level=None,
                     prepro_border=None, prepro_rgi_version=None,
                     check_demo_glacier=False, base_url=None):

    if prepro_border is None:
        prepro_border = int(cfg.PARAMS['border'])
    if prepro_rgi_version is None:
        prepro_rgi_version = cfg.PARAMS['rgi_version']
    try:
        rid = entity.RGIId
    except AttributeError:
        rid = entity

    if check_demo_glacier and base_url is None:
        demo_id = utils.demo_glacier_id(rid)
        if demo_id is not None:
            rid = demo_id
            entity = demo_id
            base_url = utils.DEMO_GDIR_URL

    tar_base = utils.get_prepro_gdir(prepro_rgi_version, rid, prepro_border,
                                     from_prepro_level, base_url=base_url)
    from_tar = os.path.join(tar_base.replace('.tar', ''), rid + '.tar.gz')
    return oggm.GlacierDirectory(entity, from_tar=from_tar)
Example #13
    def test_terminus_temp(self):
        """Testing the subroutine which computes the terminus temperature
        from the given climate file and glacier DEM. Pretty straightforward
        and somewhat useless, but a nice finger exercise.
        """

        # read the Hintereisferner DEM
        hef_file = get_demo_file('Hintereisferner_RGI5.shp')
        entity = gpd.read_file(hef_file).iloc[0]

        # initialize the GlacierDirectory
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        # define the local grid
        gis.define_glacier_region(gdir, entity=entity)
        # process the given climate file
        climate.process_custom_climate_data(gdir)

        # read the following variables from the center pixel (46.83N 10.75E)
        # of the Hintereisferner HistAlp climate file for the
        # entire time period from October 1801 until September 2003
        # - surface height in m asl.
        # - total precipitation amount in kg/m2
        # - 2m air temperature in °C
        with utils.ncDataset(get_demo_file('histalp_merged_hef.nc')) as nc_r:
            ref_h = nc_r.variables['hgt'][1, 1]
            ref_t = nc_r.variables['temp'][:, 1, 1]

        # define a temperature anomaly
        temp_anomaly = 0

        # specify temperature gradient
        temp_grad = -0.0065

        # the terminus temperature must equal the input temperature
        # if terminus elevation equals reference elevation
        temp_terminus =\
            vascaling._compute_temp_terminus(ref_t, temp_grad, ref_hgt=ref_h,
                                             terminus_hgt=ref_h,
                                             temp_anomaly=temp_anomaly)
        np.testing.assert_allclose(temp_terminus, ref_t + temp_anomaly)

        # the terminus temperature must equal the input temperature
        # if the gradient is zero
        for term_h in np.array([-100, 0, 100]) + ref_h:
            temp_terminus =\
                vascaling._compute_temp_terminus(ref_t, temp_grad=0,
                                                 ref_hgt=ref_h,
                                                 terminus_hgt=term_h,
                                                 temp_anomaly=temp_anomaly)
            np.testing.assert_allclose(temp_terminus, ref_t + temp_anomaly)

        # now test the routine with actual elevation differences
        # and a non zero temperature gradient
        for h_diff in np.array([-100, 0, 100]):
            term_h = ref_h + h_diff
            temp_diff = temp_grad * h_diff
            temp_terminus =\
                vascaling._compute_temp_terminus(ref_t, temp_grad,
                                                 ref_hgt=ref_h,
                                                 terminus_hgt=term_h,
                                                 temp_anomaly=temp_anomaly)
            np.testing.assert_allclose(temp_terminus,
                                       ref_t + temp_anomaly + temp_diff)
Example #14
import geopandas as gpd

import oggm
from oggm import cfg, tasks
from oggm.core.climate import (mb_yearly_climate_on_glacier, t_star_from_refmb,
                               local_t_star, mu_star_calibration)
from oggm.core.massbalance import (ConstantMassBalance)
from oggm.utils import get_demo_file, gettempdir
from oggm.shop import histalp

cfg.initialize()
cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
histalp.set_histalp_url('https://cluster.klima.uni-bremen.de/~oggm/'
                        'test_climate/histalp/')

base_dir = gettempdir('Climate_docs')
cfg.PATHS['working_dir'] = base_dir
entity = gpd.read_file(get_demo_file('HEF_MajDivide.shp')).iloc[0]
gdir = oggm.GlacierDirectory(entity, base_dir=base_dir, reset=True)

tasks.define_glacier_region(gdir)
tasks.glacier_masks(gdir)
tasks.compute_centerlines(gdir)
tasks.initialize_flowlines(gdir)
tasks.compute_downstream_line(gdir)
tasks.catchment_area(gdir)
tasks.catchment_width_geom(gdir)
tasks.catchment_width_correction(gdir)
cfg.PARAMS['baseline_climate'] = 'HISTALP'
cfg.PARAMS['baseline_y0'] = 1850
tasks.process_histalp_data(gdir)
mu_yr_clim = tasks.glacier_mu_candidates(gdir)

mbdf = gdir.get_ref_mb_data()
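The snippet stops after loading the reference mass balance data; a plausible continuation, following the calibration pattern of Example #15 (a sketch, not part of the original), would be:

res = t_star_from_refmb(gdir, mbdf=mbdf['ANNUAL_BALANCE'])
local_t_star(gdir, tstar=res['t_star'], bias=res['bias'])
mu_star_calibration(gdir)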
Example #15
def init_hef(reset=False, border=40, logging_level='INFO'):

    from oggm.core import gis, inversion, climate, centerlines, flowline
    import geopandas as gpd

    # test directory
    testdir = os.path.join(get_test_dir(), 'tmp_border{}'.format(border))
    if not os.path.exists(testdir):
        os.makedirs(testdir)
        reset = True

    # Init
    cfg.initialize(logging_level=logging_level)
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['baseline_climate'] = ''
    cfg.PATHS['working_dir'] = testdir
    cfg.PARAMS['border'] = border

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, reset=reset)
    if not gdir.has_file('inversion_params'):
        reset = True
        gdir = oggm.GlacierDirectory(entity, reset=reset)

    if not reset:
        return gdir

    gis.define_glacier_region(gdir)
    execute_entity_task(gis.glacier_masks, [gdir])
    execute_entity_task(centerlines.compute_centerlines, [gdir])
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    climate.process_custom_climate_data(gdir)
    mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']
    res = climate.t_star_from_refmb(gdir, mbdf=mbdf)
    climate.local_t_star(gdir, tstar=res['t_star'], bias=res['bias'])
    climate.mu_star_calibration(gdir)

    inversion.prepare_for_inversion(gdir, add_debug_var=True)

    ref_v = 0.573 * 1e9

    glen_n = cfg.PARAMS['glen_n']

    def to_optimize(x):
        # For backwards compat
        _fd = 1.9e-24 * x[0]
        glen_a = (glen_n + 2) * _fd / 2.
        fs = 5.7e-20 * x[1]
        v, _ = inversion.mass_conservation_inversion(gdir,
                                                     fs=fs,
                                                     glen_a=glen_a)
        return (v - ref_v)**2

    out = optimization.minimize(to_optimize, [1, 1],
                                bounds=((0.01, 10), (0.01, 10)),
                                tol=1e-4)['x']
    _fd = 1.9e-24 * out[0]
    glen_a = (glen_n + 2) * _fd / 2.
    fs = 5.7e-20 * out[1]
    v, _ = inversion.mass_conservation_inversion(gdir,
                                                 fs=fs,
                                                 glen_a=glen_a,
                                                 write=True)

    d = dict(fs=fs, glen_a=glen_a)
    d['factor_glen_a'] = out[0]
    d['factor_fs'] = out[1]
    gdir.write_pickle(d, 'inversion_params')

    # filter
    inversion.filter_inversion_output(gdir)

    inversion.distribute_thickness_interp(gdir, varname_suffix='_interp')
    inversion.distribute_thickness_per_altitude(gdir, varname_suffix='_alt')

    flowline.init_present_time_glacier(gdir)

    return gdir
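A hedged usage sketch: build (or re-use) the Hintereisferner test directory and read back the stored inversion parameters.

gdir = init_hef(border=40)
params = gdir.read_pickle('inversion_params')
print(params['glen_a'], params['fs'])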
Example #16
    def test_repro_to_glacier(self, class_case_dir, monkeypatch):

        # Init
        cfg.initialize()
        cfg.PATHS['working_dir'] = class_case_dir
        cfg.PARAMS['use_intersects'] = False
        cfg.PATHS['dem_file'] = get_demo_file('dem_Columbia.tif')
        cfg.PARAMS['border'] = 10

        entity = gpd.read_file(get_demo_file('RGI60-01.10689.shp')).iloc[0]
        gdir = oggm.GlacierDirectory(entity)
        tasks.define_glacier_region(gdir)
        tasks.glacier_masks(gdir)

        # use our files
        region_files = {
            'ALA': {
                'vx': get_demo_file('crop_ALA_G0120_0000_vx.tif'),
                'vy': get_demo_file('crop_ALA_G0120_0000_vy.tif')
            }
        }
        monkeypatch.setattr(its_live, 'region_files', region_files)
        monkeypatch.setattr(utils, 'file_downloader', lambda x: x)

        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", category=RuntimeWarning)
            its_live.velocity_to_gdir(gdir)

        with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
            mask = ds.glacier_mask.data.astype(bool)
            vx = ds.obs_icevel_x.where(mask).data
            vy = ds.obs_icevel_y.where(mask).data

        vel = np.sqrt(vx**2 + vy**2)
        assert np.nanmax(vel) > 2900
        assert np.nanmin(vel) < 2

        # We reproject with rasterio and check no big diff
        cfg.BASENAMES['its_live_vx'] = ('its_live_vx.tif', '')
        cfg.BASENAMES['its_live_vy'] = ('its_live_vy.tif', '')
        gis.rasterio_to_gdir(gdir,
                             region_files['ALA']['vx'],
                             'its_live_vx',
                             resampling='bilinear')
        gis.rasterio_to_gdir(gdir,
                             region_files['ALA']['vy'],
                             'its_live_vy',
                             resampling='bilinear')

        with xr.open_rasterio(gdir.get_filepath('its_live_vx')) as da:
            _vx = da.where(mask).data.squeeze()
        with xr.open_rasterio(gdir.get_filepath('its_live_vy')) as da:
            _vy = da.where(mask).data.squeeze()

        _vel = np.sqrt(_vx**2 + _vy**2)
        np.testing.assert_allclose(utils.rmsd(vel[mask], _vel[mask]),
                                   0,
                                   atol=40)
        np.testing.assert_allclose(utils.md(vel[mask], _vel[mask]), 0, atol=8)

        if DO_PLOT:
            import matplotlib.pyplot as plt

            smap = salem.Map(gdir.grid.center_grid, countries=False)
            smap.set_shapefile(gdir.read_shapefile('outlines'))

            with warnings.catch_warnings():
                warnings.filterwarnings('ignore', category=RuntimeWarning)
                smap.set_topography(gdir.get_filepath('dem'))

            vel = np.sqrt(vx**2 + vy**2)
            smap.set_data(vel)
            smap.set_plot_params(cmap='Blues', vmin=None, vmax=None)

            xx, yy = gdir.grid.center_grid.xy_coordinates
            xx, yy = smap.grid.transform(xx, yy, crs=gdir.grid.proj)

            yy = yy[2::5, 2::5]
            xx = xx[2::5, 2::5]
            vx = vx[2::5, 2::5]
            vy = vy[2::5, 2::5]

            f, ax = plt.subplots()
            smap.visualize(ax=ax,
                           title='ITS_LIVE velocity',
                           cbar_title='m yr-1')
            ax.quiver(xx, yy, vx, vy)
            plt.show()
Example #17
    def test_run_random_climate(self):
        """ Test the run_random_climate task for a climate based on the
        equilibrium period centred around t*. Additionally a positive and a
        negative temperature bias are tested.
        """
        # let's not use the mass balance bias since we want to reproduce
        # results from mass balance calibration
        cfg.PARAMS['use_bias_for_run'] = False

        # read the Hintereisferner DEM
        hef_file = get_demo_file('Hintereisferner_RGI5.shp')
        entity = gpd.read_file(hef_file).iloc[0]

        # initialize the GlacierDirectory
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        # define the local grid and glacier mask
        gis.define_glacier_region(gdir, entity=entity)
        gis.glacier_masks(gdir)

        # process the given climate file
        climate.process_custom_climate_data(gdir)
        # compute mass balance parameters
        ref_df = cfg.PARAMS['vas_ref_tstars_rgi5_histalp']
        vascaling.local_t_star(gdir, ref_df=ref_df)

        # define some parameters for the random climate model
        nyears = 300
        seed = 1
        temp_bias = 0.5
        # read the equilibrium year used for the mass balance calibration
        t_star = gdir.read_json('vascaling_mustar')['t_star']
        # run model with random climate
        _ = vascaling.run_random_climate(gdir,
                                         nyears=nyears,
                                         y0=t_star,
                                         seed=seed)
        # run model with positive temperature bias
        _ = vascaling.run_random_climate(gdir,
                                         nyears=nyears,
                                         y0=t_star,
                                         seed=seed,
                                         temperature_bias=temp_bias,
                                         output_filesuffix='_bias_p')
        # run model with negative temperature bias
        _ = vascaling.run_random_climate(gdir,
                                         nyears=nyears,
                                         y0=t_star,
                                         seed=seed,
                                         temperature_bias=-temp_bias,
                                         output_filesuffix='_bias_n')

        # compile run outputs
        ds = utils.compile_run_output([gdir], input_filesuffix='')
        ds_p = utils.compile_run_output([gdir], input_filesuffix='_bias_p')
        ds_n = utils.compile_run_output([gdir], input_filesuffix='_bias_n')

        # the glacier should not change much under a random climate
        # based on the equilibrium period centered around t*
        assert abs(1 - ds.volume.mean() / ds.volume[0]) < 0.015
        # higher temperatures should result in a smaller glacier
        assert ds.volume.mean() > ds_p.volume.mean()
        # lower temperatures should result in a larger glacier
        assert ds.volume.mean() < ds_n.volume.mean()
Example #18
def init_glacier_directories(rgidf=None, *, reset=False, force=False,
                             from_prepro_level=None, prepro_border=None,
                             prepro_rgi_version=None, prepro_base_url=None,
                             from_tar=False, delete_tar=False,
                             use_demo_glaciers=None):
    """Initializes the list of Glacier Directories for this run.

    This is the very first task to do (always). If the directories are already
    available in the working directory, use them. If not, create new ones.

    Parameters
    ----------
    rgidf : GeoDataFrame or list of ids, optional for pre-computed runs
        the RGI glacier outlines. If unavailable, OGGM will parse the
        information from the glacier directories found in the working
        directory. It is required for new runs.
    reset : bool
        delete the existing glacier directories if found.
    force : bool
        setting `reset=True` will trigger a yes/no question to the user. Set
        `force=True` to avoid this.
    from_prepro_level : int
        get the gdir data from the official pre-processed pool. See the
        documentation for more information
    prepro_border : int
        for `from_prepro_level` only: if you want to override the default
        behavior which is to use `cfg.PARAMS['border']`
    prepro_rgi_version : str
        for `from_prepro_level` only: if you want to override the default
        behavior which is to use `cfg.PARAMS['rgi_version']`
    prepro_base_url : str
        for `from_prepro_level` only: if you want to override the default
        URL from which to download the gdirs. Default currently is
        https://cluster.klima.uni-bremen.de/~fmaussion/gdirs/oggm_v1.1/
    use_demo_glaciers : bool
        whether to check the demo glaciers for download (faster than the
        standard prepro downloads). The default is to decide whether or
        not to check based on simple criteria such as glacier list size.
    from_tar : bool, default=False
        extract the gdir data from a tar file. If set to `True`,
        will check for a tar file at the expected location in `base_dir`.
    delete_tar : bool, default=False
        delete the original tar file after extraction.

    Returns
    -------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the initialised glacier directories

    Notes
    -----
    This task is very similar to init_glacier_regions, with one main
    difference: it does not process the DEMs for this glacier.
    Eventually, init_glacier_regions will be deprecated and removed from the
    codebase.
    """

    _check_duplicates(rgidf)

    if reset and not force:
        reset = utils.query_yes_no('Delete all glacier directories?')

    if from_prepro_level:
        url = utils.get_prepro_base_url(base_url=prepro_base_url,
                                        border=prepro_border,
                                        prepro_level=from_prepro_level,
                                        rgi_version=prepro_rgi_version)
        if cfg.PARAMS['has_internet'] and not utils.url_exists(url):
            raise InvalidParamsError("base url seems unreachable with these "
                                     "parameters: {}".format(url))

    # if reset delete also the log directory
    if reset:
        fpath = os.path.join(cfg.PATHS['working_dir'], 'log')
        if os.path.exists(fpath):
            rmtree(fpath)

    if rgidf is None:
        # Infer the glacier directories from folders available in working dir
        if reset:
            raise ValueError('Cannot use reset without setting rgidf')
        log.workflow('init_glacier_directories by parsing all available '
                     'folders (this takes time: if possible, provide rgidf '
                     'instead).')
        # The dirs should be there already
        gl_dir = os.path.join(cfg.PATHS['working_dir'], 'per_glacier')
        gdirs = []
        for root, _, files in os.walk(gl_dir):
            if files and ('outlines.shp' in files or
                          'outlines.tar.gz' in files):
                gdirs.append(oggm.GlacierDirectory(os.path.basename(root)))
    else:
        # Create glacier directories from input
        # Check if dataframe or list of str
        try:
            entities = []
            for _, entity in rgidf.iterrows():
                entities.append(entity)
        except AttributeError:
            entities = utils.tolist(rgidf)

        # Check demo
        if use_demo_glaciers is None:
            use_demo_glaciers = len(entities) < 100

        if from_prepro_level is not None:
            log.workflow('init_glacier_directories from prepro level {} on '
                         '{} glaciers.'.format(from_prepro_level,
                                               len(entities)))
            # Read the hash dictionary before we use multiproc
            if cfg.PARAMS['dl_verify']:
                utils.get_dl_verify_data('cluster.klima.uni-bremen.de')
            gdirs = execute_entity_task(gdir_from_prepro, entities,
                                        from_prepro_level=from_prepro_level,
                                        prepro_border=prepro_border,
                                        prepro_rgi_version=prepro_rgi_version,
                                        check_demo_glacier=use_demo_glaciers,
                                        base_url=prepro_base_url)
        else:
            # We can set the intersects file automatically here
            if (cfg.PARAMS['use_intersects'] and
                    len(cfg.PARAMS['intersects_gdf']) == 0):
                try:
                    rgi_ids = np.unique(np.sort([entity.RGIId for entity in
                                                 entities]))
                    rgi_version = rgi_ids[0].split('-')[0][-2:]
                    fp = utils.get_rgi_intersects_entities(rgi_ids,
                                                           version=rgi_version)
                    cfg.set_intersects_db(fp)
                except AttributeError:
                    # List of str
                    pass

            gdirs = execute_entity_task(utils.GlacierDirectory, entities,
                                        reset=reset,
                                        from_tar=from_tar,
                                        delete_tar=delete_tar)

    return gdirs
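A hedged usage sketch with a list of RGI ids and a pre-processed level; the working directory name, RGI id and border value are illustrative only.

cfg.initialize()
cfg.PATHS['working_dir'] = utils.gettempdir('prepro_run')  # hypothetical dir
gdirs = init_glacier_directories(['RGI60-11.00897'], from_prepro_level=3,
                                 prepro_border=80)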
Example #19
import os
import geopandas as gpd
import xarray as xr
import matplotlib.pyplot as plt
import oggm
from oggm import cfg, tasks
from oggm.utils import get_demo_file
from oggm.core.preprocessing.climate import mb_yearly_climate_on_glacier, \
    t_star_from_refmb, local_mustar_apparent_mb
from oggm.core.models.massbalance import PastMassBalanceModel

cfg.initialize()
cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
pcp_fac = 2.6
cfg.PARAMS['prcp_scaling_factor'] = pcp_fac

base_dir = os.path.join(os.path.expanduser('~'), 'Climate')
entity = gpd.read_file(get_demo_file('Hintereisferner.shp')).iloc[0]
gdir = oggm.GlacierDirectory(entity, base_dir=base_dir)

tasks.define_glacier_region(gdir, entity=entity)
tasks.glacier_masks(gdir)
tasks.compute_centerlines(gdir)

tasks.initialize_flowlines(gdir)
tasks.catchment_area(gdir)
tasks.catchment_width_geom(gdir)
tasks.catchment_width_correction(gdir)
cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
tasks.process_custom_climate_data(gdir)
tasks.mu_candidates(gdir)

mbdf = gdir.get_ref_mb_data()
res = t_star_from_refmb(gdir, mbdf.ANNUAL_BALANCE)
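Following the older preprocessing API shown in Example #24, the calibration result would then be passed to the local mu* task; a sketch reusing the names already imported above:

local_mustar_apparent_mb(gdir, tstar=res['t_star'][-1],
                         bias=res['bias'][-1],
                         prcp_fac=res['prcp_fac'])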
Example #20
    def setup_cache(self):

        setattr(full_workflow.setup_cache, "timeout", 360)

        utils.mkdir(self.testdir, reset=True)
        self.cfg_init()

        entity = gpd.read_file(get_demo_file('01_rgi60_Columbia.shp')).iloc[0]
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)

        tasks.define_glacier_region(gdir, entity=entity)
        tasks.glacier_masks(gdir)
        tasks.compute_centerlines(gdir)
        tasks.initialize_flowlines(gdir)
        tasks.compute_downstream_line(gdir)
        tasks.compute_downstream_bedshape(gdir)
        tasks.catchment_area(gdir)
        tasks.catchment_intersections(gdir)
        tasks.catchment_width_geom(gdir)
        tasks.catchment_width_correction(gdir)
        climate.process_dummy_cru_file(gdir, seed=0)

        rho = cfg.PARAMS['ice_density']
        i = 0
        calving_flux = []
        mu_star = []
        ite = []
        cfg.PARAMS['clip_mu_star'] = False
        cfg.PARAMS['min_mu_star'] = 0  # default is now 1
        while i < 12:

            # Calculates a calving flux from model output
            if i == 0:
                # First call we set to zero (not very necessary,
                # this first loop could be removed)
                f_calving = 0
            elif i == 1:
                # Second call we set a very small positive calving
                f_calving = utils.calving_flux_from_depth(gdir, water_depth=1)
            elif cfg.PARAMS['clip_mu_star']:
                # If we have to clip mu the calving becomes the real flux
                fl = gdir.read_pickle('inversion_flowlines')[-1]
                f_calving = fl.flux[-1] * (gdir.grid.dx**2) * 1e-9 / rho
            else:
                # Otherwise it is parameterized
                f_calving = utils.calving_flux_from_depth(gdir)

            # Give it back to the inversion and recompute
            gdir.inversion_calving_rate = f_calving

            # At this step we might raise a MassBalanceCalibrationError
            mu_is_zero = False
            try:
                climate.local_t_star(gdir)
                df = gdir.read_json('local_mustar')
            except MassBalanceCalibrationError as e:
                assert 'mu* out of specified bounds' in str(e)
                # When this happens we clip mu* to zero and store the
                # bad value (just for plotting)
                cfg.PARAMS['clip_mu_star'] = True
                df = gdir.read_json('local_mustar')
                df['mu_star_glacierwide'] = float(str(e).split(':')[-1])
                climate.local_t_star(gdir)

            climate.mu_star_calibration(gdir)
            tasks.prepare_for_inversion(gdir, add_debug_var=True)
            v_inv, _ = tasks.mass_conservation_inversion(gdir)

            # Store the data
            calving_flux = np.append(calving_flux, f_calving)
            mu_star = np.append(mu_star, df['mu_star_glacierwide'])
            ite = np.append(ite, i)

            # Do we have to do another loop?
            if i > 0:
                avg_one = np.mean(calving_flux[-4:])
                avg_two = np.mean(calving_flux[-5:-1])
                difference = abs(avg_two - avg_one)
                conv = (difference < 0.05 * avg_two or calving_flux[-1] == 0
                        or calving_flux[-1] == calving_flux[-2])
                if mu_is_zero or conv:
                    break
            i += 1

        assert i < 8
        assert calving_flux[-1] < np.max(calving_flux)
        assert calving_flux[-1] > 2
        assert mu_star[-1] == 0

        mbmod = massbalance.MultipleFlowlineMassBalance
        mb = mbmod(gdir,
                   use_inversion_flowlines=True,
                   mb_model_class=massbalance.ConstantMassBalance,
                   bias=0)
        flux_mb = (mb.get_specific_mb() * gdir.rgi_area_m2) * 1e-9 / rho
        np.testing.assert_allclose(flux_mb, calving_flux[-1], atol=0.001)

        return calving_flux, mu_star
Example #21
    def test_run_constant_climate(self):
        """ Test the run_constant_climate task for a climate based on the
        equilibrium period centred around t*. Additionally a positive and a
        negative temperature bias are tested.

        """
        # let's not use the mass balance bias since we want to reproduce
        # results from mass balance calibration
        cfg.PARAMS['use_bias_for_run'] = False

        # read the Hintereisferner DEM
        hef_file = get_demo_file('Hintereisferner_RGI5.shp')
        entity = gpd.read_file(hef_file).iloc[0]

        # initialize the GlacierDirectory
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        # define the local grid and glacier mask
        gis.define_glacier_region(gdir, entity=entity)
        gis.glacier_masks(gdir)

        # process the given climate file
        climate.process_custom_climate_data(gdir)
        # compute mass balance parameters
        ref_df = cfg.PARAMS['vas_ref_tstars_rgi5_histalp']
        vascaling.local_t_star(gdir, ref_df=ref_df)

        # define some parameters for the constant climate model
        nyears = 500
        temp_bias = 0.5
        _ = vascaling.run_constant_climate(gdir,
                                           nyears=nyears,
                                           output_filesuffix='')
        _ = vascaling.run_constant_climate(gdir,
                                           nyears=nyears,
                                           temperature_bias=+temp_bias,
                                           output_filesuffix='_bias_p')
        _ = vascaling.run_constant_climate(gdir,
                                           nyears=nyears,
                                           temperature_bias=-temp_bias,
                                           output_filesuffix='_bias_n')

        # compile run outputs
        ds = utils.compile_run_output([gdir], input_filesuffix='')
        ds_p = utils.compile_run_output([gdir], input_filesuffix='_bias_p')
        ds_n = utils.compile_run_output([gdir], input_filesuffix='_bias_n')

        # the glacier should not change under a constant climate
        # based on the equilibrium period centered around t*
        assert abs(1 - ds.volume.mean() / ds.volume[0]) < 1e-7
        # higher temperatures should result in a smaller glacier
        assert ds.volume.mean() > ds_p.volume.mean()
        # lower temperatures should result in a larger glacier
        assert ds.volume.mean() < ds_n.volume.mean()

        # compute volume change from one year to the next
        dV_p = (ds_p.volume[1:].values - ds_p.volume[:-1].values).flatten()
        dV_n = (ds_n.volume[1:].values - ds_n.volume[:-1].values).flatten()
        # compute relative volume change, with respect to the final volume
        rate_p = abs(dV_p / float(ds_p.volume.values[-1]))
        rate_n = abs(dV_n / float(ds_n.volume.values[-1]))
        # the glacier should be in a new equilibrium for the last 300 years
        assert max(rate_p[-300:]) < 0.001
        assert max(rate_n[-300:]) < 0.001
Example #22
    def test_yearly_mb_temp_prcp(self):
        """Test the routine which returns the yearly mass balance relevant
        climate parameters, i.e. positive melting temperature and solid
        precipitation. The testing target is the output of the corresponding
        OGGM routine `get_yearly_mb_climate_on_glacier(gdir)`.
        """

        # read the Hintereisferner DEM
        hef_file = get_demo_file('Hintereisferner_RGI5.shp')
        entity = gpd.read_file(hef_file).iloc[0]

        # initialize the GlacierDirectory
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        # define the local grid and glacier mask
        gis.define_glacier_region(gdir, entity=entity)
        gis.glacier_masks(gdir)
        # run centerline prepro tasks
        centerlines.compute_centerlines(gdir)
        centerlines.initialize_flowlines(gdir)
        centerlines.catchment_area(gdir)
        centerlines.catchment_intersections(gdir)
        centerlines.catchment_width_geom(gdir)
        centerlines.catchment_width_correction(gdir)
        # process the given climate file
        climate.process_custom_climate_data(gdir)

        # get yearly sums of terminus temperature and solid precipitation
        years, temp, prcp = vascaling.get_yearly_mb_temp_prcp(gdir)

        # use the OGGM method to get the mass balance
        # relevant climate parameters
        years_oggm, temp_oggm, prcp_oggm = \
            climate.mb_yearly_climate_on_glacier(gdir)

        # the energy input at the glacier terminus must be greater than (or
        # equal to) the glacier wide average, since the air temperature drops
        # with elevation, i.e. the mean deviation must be positive, using the
        # OGGM data as reference
        assert md(temp_oggm, temp) >= 0
        # consequently, the average mass input must be less than (or equal
        # to) the mass input integrated over the whole glacier surface, i.e.
        # the mean deviation must be negative, using the OGGM data as reference
        # TODO: does it actually?! And if so, why?! @ASK
        assert md(prcp_oggm, prcp) <= 0

        # correlation must be higher than set threshold
        assert corrcoef(temp, temp_oggm) >= 0.94
        assert corrcoef(prcp, prcp_oggm) >= 0.98

        # get terminus temperature using the OGGM routine
        fpath = gdir.get_filepath('gridded_data')
        with ncDataset(fpath) as nc:
            mask = nc.variables['glacier_mask'][:]
            topo = nc.variables['topo'][:]
        heights = np.array([np.min(topo[np.where(mask == 1)])])
        years_height, temp_height, _ = \
            climate.mb_yearly_climate_on_height(gdir, heights, flatten=False)
        temp_height = temp_height[0]
        # both time series must be equal
        np.testing.assert_array_equal(temp, temp_height)

        # get solid precipitation averaged over the glacier
        # (not weighted with widths)
        fls = gdir.read_pickle('inversion_flowlines')
        heights = np.array([])
        for fl in fls:
            heights = np.append(heights, fl.surface_h)
        years_height, _, prcp_height = \
            climate.mb_yearly_climate_on_height(gdir, heights, flatten=True)
        # correlation must be higher than set threshold
        assert corrcoef(prcp, prcp_height) >= 0.99

        # TODO: assert absolute values (or differences) of precipitation @ASK

        # test exception handling of out of bounds time/year range
        with self.assertRaises(climate.MassBalanceCalibrationError):
            # start year out of bounds
            year_range = [1500, 1980]
            _, _, _ = vascaling.get_yearly_mb_temp_prcp(gdir,
                                                        year_range=year_range)
        with self.assertRaises(climate.MassBalanceCalibrationError):
            # end year out of bounds
            year_range = [1980, 3000]
            _, _, _ = vascaling.get_yearly_mb_temp_prcp(gdir,
                                                        year_range=year_range)
        with self.assertRaises(ValueError):
            # request a time range that does not cover N full years
            t0 = datetime.datetime(1980, 1, 1)
            t1 = datetime.datetime(1980, 3, 1)
            time_range = [t0, t1]
            _, _, _ = vascaling.get_yearly_mb_temp_prcp(gdir,
                                                        time_range=time_range)

        # TODO: assert gradient in climate file?!

        pass
Example #23
    def test_solid_prcp(self):
        """Tests the subroutine which computes solid precipitation amount from
        given total precipitation and temperature.
        """

        # read the Hintereisferner DEM
        hef_file = get_demo_file('Hintereisferner_RGI5.shp')
        entity = gpd.read_file(hef_file).iloc[0]

        # initialize the GlacierDirectory
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        # define the local grid
        gis.define_glacier_region(gdir, entity=entity)
        # process the given climate file
        climate.process_custom_climate_data(gdir)

        # read the following variables from the center pixel (46.83N 10.75E)
        # of the Hintereisferner HistAlp climate file for the
        # entire time period from October 1801 until September 2003
        # - surface height in m asl.
        # - total precipitation amount in kg/m2
        # - 2m air temperature in °C
        with utils.ncDataset(get_demo_file('histalp_merged_hef.nc')) as nc_r:
            ref_h = nc_r.variables['hgt'][1, 1]
            ref_p = nc_r.variables['prcp'][:, 1, 1]
            ref_t = nc_r.variables['temp'][:, 1, 1]

        # define needed parameters
        prcp_factor = 1
        temp_all_solid = 0
        temp_grad = -0.0065

        # define elevation levels
        ref_hgt = ref_h
        min_hgt = ref_h - 100
        max_hgt = ref_h + 100

        # if the terminus temperature is below the threshold for
        # solid precipitation all fallen precipitation must be solid
        temp_terminus = ref_t * 0 + temp_all_solid
        solid_prcp = vascaling._compute_solid_prcp(ref_p, prcp_factor, ref_hgt,
                                                   min_hgt, max_hgt,
                                                   temp_terminus,
                                                   temp_all_solid, temp_grad,
                                                   prcp_grad=0, prcp_anomaly=0)
        np.testing.assert_allclose(solid_prcp, ref_p)

        # if the temperature at the maximal elevation is above the threshold
        # for solid precipitation all fallen precipitation must be liquid
        temp_terminus = ref_t + 100
        solid_prcp = vascaling._compute_solid_prcp(ref_p, prcp_factor, ref_hgt,
                                                   min_hgt, max_hgt,
                                                   temp_terminus,
                                                   temp_all_solid, temp_grad,
                                                   prcp_grad=0, prcp_anomaly=0)
        np.testing.assert_allclose(solid_prcp, 0)

        # test extreme case if max_hgt equals min_hgt
        test_p = ref_p * (ref_t <= temp_all_solid).astype(int)
        solid_prcp = vascaling._compute_solid_prcp(ref_p, prcp_factor, ref_hgt,
                                                   ref_hgt, ref_hgt, ref_t,
                                                   temp_all_solid, temp_grad,
                                                   prcp_grad=0, prcp_anomaly=0)
        np.testing.assert_allclose(solid_prcp, test_p)
Example #24
def init_hef(reset=False,
             border=40,
             invert_with_sliding=True,
             invert_with_rectangular=True):

    from oggm.core.preprocessing import gis, centerlines, geometry
    from oggm.core.preprocessing import climate, inversion
    import oggm
    import oggm.cfg as cfg
    from oggm.utils import get_demo_file

    # test directory
    testdir = os.path.join(cfg.PATHS['test_dir'],
                           'tmp_border{}'.format(border))
    if not invert_with_sliding:
        testdir += '_withoutslide'
    if not invert_with_rectangular:
        testdir += '_withoutrectangular'
    if not os.path.exists(testdir):
        os.makedirs(testdir)
        reset = True

    # Init
    cfg.initialize()
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['border'] = border

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=reset)
    if not gdir.has_file('inversion_params'):
        reset = True
        gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=reset)

    if not reset:
        return gdir

    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.compute_downstream_lines(gdir)
    geometry.initialize_flowlines(gdir)
    geometry.catchment_area(gdir)
    geometry.catchment_intersections(gdir)
    geometry.catchment_width_geom(gdir)
    geometry.catchment_width_correction(gdir)
    climate.process_histalp_nonparallel([gdir])
    climate.mu_candidates(gdir, div_id=0)
    mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']
    res = climate.t_star_from_refmb(gdir, mbdf)
    climate.local_mustar_apparent_mb(gdir,
                                     tstar=res['t_star'][-1],
                                     bias=res['bias'][-1],
                                     prcp_fac=res['prcp_fac'])

    inversion.prepare_for_inversion(
        gdir,
        add_debug_var=True,
        invert_with_rectangular=invert_with_rectangular)
    ref_v = 0.573 * 1e9

    if invert_with_sliding:

        def to_optimize(x):
            # For backwards compat
            _fd = 1.9e-24 * x[0]
            glen_a = (cfg.N + 2) * _fd / 2.
            fs = 5.7e-20 * x[1]
            v, _ = inversion.mass_conservation_inversion(gdir,
                                                         fs=fs,
                                                         glen_a=glen_a)
            return (v - ref_v)**2

        out = optimization.minimize(to_optimize, [1, 1],
                                    bounds=((0.01, 10), (0.01, 10)),
                                    tol=1e-4)['x']
        _fd = 1.9e-24 * out[0]
        glen_a = (cfg.N + 2) * _fd / 2.
        fs = 5.7e-20 * out[1]
        v, _ = inversion.mass_conservation_inversion(gdir,
                                                     fs=fs,
                                                     glen_a=glen_a,
                                                     write=True)
    else:

        def to_optimize(x):
            glen_a = cfg.A * x[0]
            v, _ = inversion.mass_conservation_inversion(gdir,
                                                         fs=0.,
                                                         glen_a=glen_a)
            return (v - ref_v)**2

        out = optimization.minimize(to_optimize, [1],
                                    bounds=((0.01, 10), ),
                                    tol=1e-4)['x']
        glen_a = cfg.A * out[0]
        fs = 0.
        v, _ = inversion.mass_conservation_inversion(gdir,
                                                     fs=fs,
                                                     glen_a=glen_a,
                                                     write=True)
    d = dict(fs=fs, glen_a=glen_a)
    d['factor_glen_a'] = out[0]
    try:
        d['factor_fs'] = out[1]
    except IndexError:
        d['factor_fs'] = 0.
    gdir.write_pickle(d, 'inversion_params')

    # filter
    inversion.filter_inversion_output(gdir)

    inversion.distribute_thickness(gdir, how='per_altitude', add_nc_name=True)
    inversion.distribute_thickness(gdir,
                                   how='per_interpolation',
                                   add_slope=False,
                                   smooth=False,
                                   add_nc_name=True)

    return gdir
Exemple #25
0
import geopandas as gpd
import oggm
from oggm import cfg, tasks
from oggm.utils import get_demo_file

# Set up the input data for this example
cfg.initialize()
cfg.PATHS['working_dir'] = oggm.utils.get_temp_dir('oggmcontrib_inv')
cfg.PATHS['dem_file'] = get_demo_file('srtm_oetztal.tif')
cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))

# Glacier directory for Hintereisferner in Austria
entity = gpd.read_file(get_demo_file('Hintereisferner_RGI5.shp')).iloc[0]
gdir = oggm.GlacierDirectory(entity)

# The usual OGGM preprocessing
tasks.define_glacier_region(gdir, entity=entity)
tasks.glacier_masks(gdir)
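
# Illustrative continuation (an assumption, not part of the original
# snippet): the masks computed above are written to the glacier directory's
# gridded data file; the variable name below is the one used by recent OGGM
# versions and should be treated as an assumption.
import xarray as xr

with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
    print(ds['glacier_mask'])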
Exemple #26
0
def init_glacier_regions(rgidf=None, reset=False, force=False):
    """Initializes the list of Glacier Directories for this run.

    This is the very first task to do (always). If the directories are already
    available in the working directory, use them. If not, create new ones.

    Parameters
    ----------
    rgidf : GeoDataFrame, optional for pre-computed runs
        the RGI glacier outlines. If unavailable, OGGM will parse the
        information from the glacier directories found in the working
        directory. It is required for new runs.
    reset : bool
        delete the existing glacier directories if found.
    force : bool
        setting `reset=True` will trigger a yes/no question to the user. Set
        `force=True` to avoid this.

    Returns
    -------
    a list of GlacierDirectory objects
    """

    if reset and not force:
        reset = utils.query_yes_no('Delete all glacier directories?')

    # if reset delete also the log directory
    if reset:
        fpath = os.path.join(cfg.PATHS['working_dir'], 'log')
        if os.path.exists(fpath):
            rmtree(fpath)

    gdirs = []
    new_gdirs = []
    if rgidf is None:
        if reset:
            raise ValueError('Cannot use reset without a rgi file')
        # The dirs should be there already
        gl_dir = os.path.join(cfg.PATHS['working_dir'], 'per_glacier')
        for root, _, files in os.walk(gl_dir):
            if files and ('dem.tif' in files):
                gdirs.append(oggm.GlacierDirectory(os.path.basename(root)))
    else:
        for _, entity in rgidf.iterrows():
            gdir = oggm.GlacierDirectory(entity, reset=reset)
            if not os.path.exists(gdir.get_filepath('dem')):
                new_gdirs.append((gdir, dict(entity=entity)))
            gdirs.append(gdir)

    # We can set the intersects file automatically here
    if (cfg.PARAMS['use_intersects'] and new_gdirs
            and (len(cfg.PARAMS['intersects_gdf']) == 0)):
        rgi_ids = np.unique(np.sort([t[0].rgi_id for t in new_gdirs]))
        rgi_version = new_gdirs[0][0].rgi_version
        fp = utils.get_rgi_intersects_entities(rgi_ids, version=rgi_version)
        cfg.set_intersects_db(fp)

    # If not initialized, run the task in parallel
    execute_entity_task(tasks.define_glacier_region, new_gdirs)

    return gdirs
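
# Minimal usage sketch (illustrative only; the RGI id is borrowed from a
# later example in this collection and serves as a placeholder):
rgidf = utils.get_rgi_glacier_entities(['RGI60-18.02342'])
gdirs = init_glacier_regions(rgidf)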
Exemple #27
0
def init_glacier_regions(rgidf=None, *, reset=False, force=False,
                         from_prepro_level=None, prepro_border=None,
                         prepro_rgi_version=None, prepro_base_url=None,
                         from_tar=False, delete_tar=False,
                         use_demo_glaciers=None):
    """DEPRECATED: Initializes the list of Glacier Directories for this run.

    This is the very first task to do (always). If the directories are already
    available in the working directory, use them. If not, create new ones.

    Parameters
    ----------
    rgidf : GeoDataFrame or list of ids, optional for pre-computed runs
        the RGI glacier outlines. If unavailable, OGGM will parse the
        information from the glacier directories found in the working
        directory. It is required for new runs.
    reset : bool
        delete the existing glacier directories if found.
    force : bool
        setting `reset=True` will trigger a yes/no question to the user. Set
        `force=True` to avoid this.
    from_prepro_level : int
        get the gdir data from the official pre-processed pool. See the
        documentation for more information
    prepro_border : int
        for `from_prepro_level` only: if you want to override the default
        behavior which is to use `cfg.PARAMS['border']`
    prepro_rgi_version : str
        for `from_prepro_level` only: if you want to override the default
        behavior which is to use `cfg.PARAMS['rgi_version']`
    prepro_base_url : str
        for `from_prepro_level` only: if you want to override the default
        URL from which to download the gdirs. Default currently is
        https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.1/
    use_demo_glaciers : bool
        whether to check the demo glaciers for download (faster than the
        standard prepro downloads). The default is to decide whether or
        not to check based on simple criteria such as glacier list size.
    from_tar : bool, default=False
        extract the gdir data from a tar file. If set to `True`,
        will check for a tar file at the expected location in `base_dir`.
    delete_tar : bool, default=False
        delete the original tar file after extraction.

    Returns
    -------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the initialised glacier directories

    Notes
    -----
    This task is deprecated in favor of the more explicit
    init_glacier_directories. Indeed, init_glacier_directories is very
    similar to init_glacier_regions, but it does not process the DEMs:
    a glacier directory is also valid without a DEM.
    """

    _check_duplicates(rgidf)

    if reset and not force:
        reset = utils.query_yes_no('Delete all glacier directories?')

    if prepro_border is None:
        prepro_border = int(cfg.PARAMS['border'])

    if from_prepro_level and prepro_border not in [10, 80, 160, 250]:
        if 'test' not in utils._downloads.GDIR_URL:
            raise InvalidParamsError("prepro_border or cfg.PARAMS['border'] "
                                     "should be one of: 10, 80, 160, 250.")

    # if reset delete also the log directory
    if reset:
        fpath = os.path.join(cfg.PATHS['working_dir'], 'log')
        if os.path.exists(fpath):
            rmtree(fpath)

    gdirs = []
    new_gdirs = []
    if rgidf is None:
        if reset:
            raise ValueError('Cannot use reset without setting rgidf')
        log.workflow('init_glacier_regions by parsing available folders '
                     '(can be slow).')
        # The dirs should be there already
        gl_dir = os.path.join(cfg.PATHS['working_dir'], 'per_glacier')
        for root, _, files in os.walk(gl_dir):
            if files and ('dem.tif' in files):
                gdirs.append(oggm.GlacierDirectory(os.path.basename(root)))
    else:

        # Check if dataframe or list of strs
        try:
            entities = []
            for _, entity in rgidf.iterrows():
                entities.append(entity)
        except AttributeError:
            entities = utils.tolist(rgidf)

        # Check demo
        if use_demo_glaciers is None:
            use_demo_glaciers = len(entities) < 100

        if from_prepro_level is not None:
            log.workflow('init_glacier_regions from prepro level {} on '
                         '{} glaciers.'.format(from_prepro_level,
                                               len(entities)))
            # Read the hash dictionary before we use multiproc
            if cfg.PARAMS['dl_verify']:
                utils.get_dl_verify_data('cluster.klima.uni-bremen.de')
            gdirs = execute_entity_task(gdir_from_prepro, entities,
                                        from_prepro_level=from_prepro_level,
                                        prepro_border=prepro_border,
                                        prepro_rgi_version=prepro_rgi_version,
                                        check_demo_glacier=use_demo_glaciers,
                                        base_url=prepro_base_url)
        else:
            # We can set the intersects file automatically here
            if (cfg.PARAMS['use_intersects'] and
                    len(cfg.PARAMS['intersects_gdf']) == 0):
                rgi_ids = np.unique(np.sort([entity.RGIId for entity in
                                             entities]))
                rgi_version = rgi_ids[0].split('-')[0][-2:]
                fp = utils.get_rgi_intersects_entities(rgi_ids,
                                                       version=rgi_version)
                cfg.set_intersects_db(fp)

            gdirs = execute_entity_task(utils.GlacierDirectory, entities,
                                        reset=reset,
                                        from_tar=from_tar,
                                        delete_tar=delete_tar)

            for gdir in gdirs:
                if not os.path.exists(gdir.get_filepath('dem')):
                    new_gdirs.append(gdir)

    if len(new_gdirs) > 0:
        # If not initialized, run the task in parallel
        execute_entity_task(tasks.define_glacier_region, new_gdirs)

    return gdirs
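
# Hedged usage sketch: directories can also be fetched from the
# pre-processed pool instead of being built locally. The border value is one
# of those accepted by the check above; the prepro level is an assumption
# chosen for illustration.
rgidf = utils.get_rgi_glacier_entities(['RGI60-18.02342'])
gdirs = init_glacier_regions(rgidf, from_prepro_level=3, prepro_border=80)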
Exemple #28
0
import os
import tempfile

import oggm
from oggm import cfg, tasks, utils

# PLOT_DIR is assumed to be defined elsewhere in the original script
dir_path = os.path.join(tempfile.gettempdir(), 'fig_01')
fig_path = os.path.join(PLOT_DIR, 'workflow_tas.pdf')

cfg.initialize()
cfg.PARAMS['border'] = 20

cfg.PATHS['working_dir'] = dir_path
utils.mkdir(dir_path, reset=True)

rgidf = utils.get_rgi_glacier_entities(['RGI60-18.02342'])
entity = rgidf.iloc[0]

cfg.set_intersects_db(utils.get_rgi_intersects_entities(['RGI60-18.02342']))

gdir = oggm.GlacierDirectory(entity, base_dir=dir_path)

tasks.define_glacier_region(gdir, entity=entity)
tasks.glacier_masks(gdir)
tasks.compute_centerlines(gdir)
tasks.initialize_flowlines(gdir)
tasks.compute_downstream_line(gdir)
tasks.compute_downstream_bedshape(gdir)
tasks.catchment_area(gdir)
tasks.catchment_intersections(gdir)
tasks.catchment_width_geom(gdir)
tasks.catchment_width_correction(gdir)
tasks.process_cru_data(gdir)
tasks.local_t_star(gdir)
tasks.mu_star_calibration(gdir)
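
# Hedged sketch of a plausible next step (the original defines fig_path but
# does not show the plotting call): OGGM's graphics module can render the
# centerlines computed above; the exact figure produced originally is
# unknown.
import matplotlib.pyplot as plt
from oggm import graphics

fig, ax = plt.subplots()
graphics.plot_centerlines(gdir, ax=ax, use_flowlines=True)
fig.savefig(fig_path)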
Exemple #29
0
    def test_run_until_and_store(self):
        """Test the volume/area scaling model against the oggm.FluxBasedModel.

        Both models run the Hintereisferner over the entire HistAlp climate
        period, initialized with the 2003 RGI outline without spin up.

        The following two metrics are tested for length, area and volume:
            - correlation coefficient
            - relative RMSE, i.e. RMSE/mean(OGGM), where the VAS results are
                offset by their average difference to the OGGM results before
                computing the RMSE.
        """

        # read the Hintereisferner DEM
        hef_file = get_demo_file('Hintereisferner_RGI5.shp')
        entity = gpd.read_file(hef_file).iloc[0]

        # initialize the GlacierDirectory
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
        # define the local grid and glacier mask
        gis.define_glacier_region(gdir, entity=entity)
        gis.glacier_masks(gdir)

        # process the given climate file
        climate.process_custom_climate_data(gdir)

        # run center line preprocessing tasks
        centerlines.compute_centerlines(gdir)
        centerlines.initialize_flowlines(gdir)
        centerlines.compute_downstream_line(gdir)
        centerlines.compute_downstream_bedshape(gdir)
        centerlines.catchment_area(gdir)
        centerlines.catchment_intersections(gdir)
        centerlines.catchment_width_geom(gdir)
        centerlines.catchment_width_correction(gdir)

        # read reference glacier mass balance data
        mbdf = gdir.get_ref_mb_data()
        # compute the reference t* for the glacier
        # given the reference of mass balance measurements
        res = climate.t_star_from_refmb(gdir, mbdf=mbdf['ANNUAL_BALANCE'])
        t_star, bias = res['t_star'], res['bias']

        # --------------------
        #  SCALING MODEL
        # --------------------

        # compute local t* and the corresponding mu*
        vascaling.local_t_star(gdir, tstar=t_star, bias=bias)

        # instance the mass balance models
        vas_mbmod = vascaling.VAScalingMassBalance(gdir)

        # get reference area
        a0 = gdir.rgi_area_m2
        # get reference year
        y0 = gdir.read_json('climate_info')['baseline_hydro_yr_0']
        # get min and max glacier surface elevation
        h0, h1 = vascaling.get_min_max_elevation(gdir)

        vas_model = vascaling.VAScalingModel(year_0=y0, area_m2_0=a0,
                                             min_hgt=h0, max_hgt=h1,
                                             mb_model=vas_mbmod)

        # let model run over entire HistAlp climate period
        vas_ds = vas_model.run_until_and_store(2003)

        # ------
        #  OGGM
        # ------

        # compute local t* and the corresponding mu*
        climate.local_t_star(gdir, tstar=t_star, bias=bias)
        climate.mu_star_calibration(gdir)

        # instance the mass balance models
        mb_mod = massbalance.PastMassBalance(gdir)

        # perform ice thickness inversion
        inversion.prepare_for_inversion(gdir)
        inversion.mass_conservation_inversion(gdir)
        inversion.filter_inversion_output(gdir)

        # initialize present time glacier
        flowline.init_present_time_glacier(gdir)

        # instance flowline model
        fls = gdir.read_pickle('model_flowlines')
        y0 = gdir.read_json('climate_info')['baseline_hydro_yr_0']
        fl_mod = flowline.FluxBasedModel(flowlines=fls, mb_model=mb_mod, y0=y0)

        # run model and store output as xarray data set
        _, oggm_ds = fl_mod.run_until_and_store(2003)

        # temporal indices must be equal
        assert (vas_ds.time == oggm_ds.time).all()

        # specify which parameters to compare and their respective correlation
        # coefficients and rmsd values
        params = ['length_m', 'area_m2', 'volume_m3']
        corr_coeffs = np.array([0.96, 0.90, 0.93])
        rmsds = np.array([0.43e3, 0.14e6, 0.03e9])

        # compare given parameters
        for param, cc, rmsd in zip(params, corr_coeffs, rmsds):
            # correlation coefficient
            assert corrcoef(oggm_ds[param].values, vas_ds[param].values) >= cc
            # root mean squared deviation
            rmsd_an = rmsd_bc(oggm_ds[param].values, vas_ds[param].values)
            assert rmsd_an <= rmsd
Exemple #30
0
def init_hef(reset=False,
             border=40,
             invert_with_sliding=True,
             invert_with_rectangular=True):

    # test directory
    testdir = os.path.join(get_test_dir(), 'tmp_border{}'.format(border))
    if not invert_with_sliding:
        testdir += '_withoutslide'
    if not invert_with_rectangular:
        testdir += '_withoutrectangular'
    if not os.path.exists(testdir):
        os.makedirs(testdir)
        reset = True

    # Init
    cfg.initialize()
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['border'] = border
    cfg.PARAMS['use_optimized_inversion_params'] = True

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=reset)
    if not gdir.has_file('inversion_params'):
        reset = True
        gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=reset)

    if not reset:
        return gdir

    gis.define_glacier_region(gdir, entity=entity)
    execute_entity_task(gis.glacier_masks, [gdir])
    execute_entity_task(centerlines.compute_centerlines, [gdir])
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    climate.process_custom_climate_data(gdir)
    climate.mu_candidates(gdir)
    mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']
    res = climate.t_star_from_refmb(gdir, mbdf)
    climate.local_mustar(gdir,
                         tstar=res['t_star'][-1],
                         bias=res['bias'][-1],
                         prcp_fac=res['prcp_fac'])
    climate.apparent_mb(gdir)

    inversion.prepare_for_inversion(
        gdir,
        add_debug_var=True,
        invert_with_rectangular=invert_with_rectangular)
    ref_v = 0.573 * 1e9

    if invert_with_sliding:

        def to_optimize(x):
            # For backwards compat
            _fd = 1.9e-24 * x[0]
            glen_a = (cfg.N + 2) * _fd / 2.
            fs = 5.7e-20 * x[1]
            v, _ = inversion.mass_conservation_inversion(gdir,
                                                         fs=fs,
                                                         glen_a=glen_a)
            return (v - ref_v)**2

        out = optimization.minimize(to_optimize, [1, 1],
                                    bounds=((0.01, 10), (0.01, 10)),
                                    tol=1e-4)['x']
        _fd = 1.9e-24 * out[0]
        glen_a = (cfg.N + 2) * _fd / 2.
        fs = 5.7e-20 * out[1]
        v, _ = inversion.mass_conservation_inversion(gdir,
                                                     fs=fs,
                                                     glen_a=glen_a,
                                                     write=True)
    else:

        def to_optimize(x):
            glen_a = cfg.A * x[0]
            v, _ = inversion.mass_conservation_inversion(gdir,
                                                         fs=0.,
                                                         glen_a=glen_a)
            return (v - ref_v)**2

        out = optimization.minimize(to_optimize, [1],
                                    bounds=((0.01, 10), ),
                                    tol=1e-4)['x']
        glen_a = cfg.A * out[0]
        fs = 0.
        v, _ = inversion.mass_conservation_inversion(gdir,
                                                     fs=fs,
                                                     glen_a=glen_a,
                                                     write=True)
    d = dict(fs=fs, glen_a=glen_a)
    d['factor_glen_a'] = out[0]
    try:
        d['factor_fs'] = out[1]
    except IndexError:
        d['factor_fs'] = 0.
    gdir.write_pickle(d, 'inversion_params')

    # filter
    inversion.filter_inversion_output(gdir)

    inversion.distribute_thickness_interp(gdir, varname_suffix='_interp')
    inversion.distribute_thickness_per_altitude(gdir, varname_suffix='_alt')

    flowline.init_present_time_glacier(gdir)

    return gdir
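
# Minimal usage sketch (flag values are illustrative): the helper can be
# reused across tests; without sliding, the stored parameters reflect the
# fs = 0 branch above.
gdir = init_hef(border=40, invert_with_sliding=False)
params = gdir.read_pickle('inversion_params')
assert params['fs'] == 0.
assert params['factor_fs'] == 0.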