Example #1
    def test_random(self):

        # Fake Reset (all these tests are horribly coded)
        if not os.path.exists(TEST_DIR):
            os.makedirs(TEST_DIR)
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('none', f)
        gdirs = up_to_inversion()

        workflow.execute_entity_task(flowline.init_present_time_glacier, gdirs)
        workflow.execute_entity_task(flowline.run_random_climate, gdirs,
                                     nyears=200, seed=0,
                                     store_monthly_step=True,
                                     output_filesuffix='_test')

        for gd in gdirs:

            path = gd.get_filepath('model_run', filesuffix='_test')
            # See that we are running ok
            with flowline.FileModel(path) as model:
                vol = model.volume_km3_ts()
                area = model.area_km2_ts()
                length = model.length_m_ts()

                self.assertTrue(np.all(np.isfinite(vol) & (vol != 0.)))
                self.assertTrue(np.all(np.isfinite(area) & (area != 0.)))
                self.assertTrue(np.all(np.isfinite(length) & (length != 0.)))

            ds_diag = gd.get_filepath('model_diagnostics', filesuffix='_test')
            ds_diag = xr.open_dataset(ds_diag)
            df = vol.to_frame('RUN')
            df['DIAG'] = ds_diag.volume_m3.to_series() * 1e-9
            assert_allclose(df.RUN, df.DIAG)
            df = area.to_frame('RUN')
            df['DIAG'] = ds_diag.area_m2.to_series() * 1e-6
            assert_allclose(df.RUN, df.DIAG)
            df = length.to_frame('RUN')
            df['DIAG'] = ds_diag.length_m.to_series()
            assert_allclose(df.RUN, df.DIAG)

        # Test output
        ds = utils.compile_run_output(gdirs, filesuffix='_test')
        assert_allclose(ds_diag.volume_m3, ds.volume.sel(rgi_id=gd.rgi_id))
        assert_allclose(ds_diag.area_m2, ds.area.sel(rgi_id=gd.rgi_id))
        assert_allclose(ds_diag.length_m, ds.length.sel(rgi_id=gd.rgi_id))
        df = ds.volume.sel(rgi_id=gd.rgi_id).to_series().to_frame('OUT')
        df['RUN'] = ds_diag.volume_m3.to_series()
        assert_allclose(df.RUN, df.OUT)
Example #2
def random_for_plot():

    # Fake Reset (all these tests are horribly coded)
    try:
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('none', f)
    except FileNotFoundError:
        pass
    gdirs = up_to_inversion()

    workflow.execute_entity_task(flowline.init_present_time_glacier, gdirs)
    workflow.execute_entity_task(flowline.run_random_climate, gdirs,
                                 nyears=10, seed=0, output_filesuffix='_plot')
    return gdirs
Example #3
def run_with_job_array(y0, nyears, halfsize, mtype, prcp_prefix=None,
                       temp_prefix=None, run_for_test=False, mean_years=None,
                       output_dir=None, output_filesuffix=None):

    if output_dir is None:
        outpath = utils.mkdir(cluster_dir, reset=False)
    else:
        outpath = utils.mkdir(os.path.join(cluster_dir, output_dir),
                              reset=False)
    gdirs = pre_process_tasks(run_for_test=run_for_test)
    if mtype == 'origin':
        if output_filesuffix is None:
            output_filesuffix = f'_origin_hf{halfsize}'
        workflow.execute_entity_task(run_my_random_climate, gdirs,
                                     nyears=nyears, y0=y0, seed=1,
                                     halfsize=halfsize,
                                     output_filesuffix=output_filesuffix,
                                     mean_years=mean_years)
    else:
        if CLIMATE_DATA == '2':
            mtype = '_' + mtype
        if prcp_prefix:
            fpath_prcp_diff = os.path.join(data_dir, f'{prcp_prefix}{mtype}.nc')
        else:
            fpath_prcp_diff = None

        if temp_prefix:
            fpath_temp_diff = os.path.join(data_dir, f'{temp_prefix}{mtype}.nc')
        else:
            fpath_temp_diff = None

        if output_filesuffix is None:
            output_filesuffix = f'_exper_{mtype}_hf{halfsize}'
        workflow.execute_entity_task(run_my_random_climate, gdirs, nyears=nyears,
                                     y0=y0, seed=1, halfsize=halfsize,
                                     output_filesuffix=output_filesuffix,
                                     mean_years=mean_years,
                                     fpath_temp_diff=fpath_temp_diff,
                                     fpath_prcp_diff=fpath_prcp_diff)

    ds = utils.compile_run_output(gdirs, input_filesuffix=output_filesuffix,
                                  path=False)
    # Load into memory before writing: a lazy to_netcdf can stall on the
    # cluster, see https://github.com/OGGM/oggm/pull/1122 and
    # https://github.com/pydata/xarray/issues/4710
    print(f"Saving result{output_filesuffix}.nc")

    ds.load().to_netcdf(path=os.path.join(outpath,
                                          'result' + output_filesuffix + '.nc'))
Example #5
def pre_process_tasks(run_for_test=False):

    path10 = utils.get_rgi_region_file('10', '61')
    path13 = utils.get_rgi_region_file('13', '61')
    path14 = utils.get_rgi_region_file('14', '61')
    path15 = utils.get_rgi_region_file('15', '61')
    rgidf10 = gpd.read_file(path10)
    rgidf10 = rgidf10[rgidf10.O2Region == '4']
    rgidf13 = gpd.read_file(path13)
    rgidf14 = gpd.read_file(path14)
    rgidf15 = gpd.read_file(path15)
    rgidf = pd.concat([rgidf10, rgidf13, rgidf14, rgidf15])
    if (not run_in_cluster) or run_for_test:
        rgidf = rgidf10.iloc[0:5, :]
    cfg.initialize()
    cfg.PARAMS['border'] = 160
    cfg.PATHS['working_dir'] = utils.mkdir(working_dir)
    cfg.PARAMS['continue_on_error'] = True
    cfg.PARAMS['use_multiprocessing'] = True

    gdirs = workflow.init_glacier_directories(rgidf, from_prepro_level=1,
                                              reset=True, force=True)
    task_list = [
        tasks.define_glacier_region,
        tasks.glacier_masks,
        tasks.compute_centerlines,
        tasks.initialize_flowlines,
        tasks.compute_downstream_line,
        tasks.compute_downstream_bedshape, 
        tasks.catchment_area,
        tasks.catchment_intersections,
        tasks.catchment_width_geom,
        tasks.catchment_width_correction,
        tasks.process_cru_data,
        tasks.local_t_star,
        tasks.mu_star_calibration,
        tasks.prepare_for_inversion,
        tasks.mass_conservation_inversion,
        tasks.filter_inversion_output, 
        tasks.init_present_time_glacier
    ]
    for task in task_list:
        workflow.execute_entity_task(task, gdirs)

    return gdirs
Example #6
def preprocessing(gdirs):
    # Prepro tasks
    task_list = [
        tasks.glacier_masks,
        # tasks.compute_centerlines,
        # tasks.initialize_flowlines,
        # tasks.catchment_area,
        # tasks.catchment_intersections,
        # tasks.catchment_width_geom,
        # tasks.catchment_width_correction,
    ]
    for task in task_list:
        execute_entity_task(task, gdirs)

    # Climate tasks
    execute_entity_task(tasks.process_histalp_data, gdirs)

    return gdirs
Example #7
    def test_shapefile_output(self):

        gdirs = up_to_climate(use_mp=True)

        fpath = os.path.join(_TEST_DIR, 'centerlines.shp')
        write_centerlines_to_shape(gdirs, path=fpath)

        import salem
        shp = salem.read_shapefile(fpath)
        self.assertTrue(shp is not None)
        shp = shp.loc[shp.RGIID == 'RGI60-11.00897']
        self.assertEqual(len(shp), 3)
        self.assertEqual(shp.loc[shp.LE_SEGMENT.idxmax()].MAIN, 1)

        fpath = os.path.join(_TEST_DIR, 'flowlines.shp')
        write_centerlines_to_shape(gdirs, path=fpath, flowlines_output=True)
        shp_f = salem.read_shapefile(fpath)
        self.assertTrue(shp_f is not None)
        shp_f = shp_f.loc[shp_f.RGIID == 'RGI60-11.00897']
        self.assertEqual(len(shp_f), 3)
        self.assertEqual(shp_f.loc[shp_f.LE_SEGMENT.idxmax()].MAIN, 1)
        # The flowline is cut so shorter
        assert shp_f.LE_SEGMENT.max() < shp.LE_SEGMENT.max() * 0.8

        fpath = os.path.join(_TEST_DIR, 'widths_geom.shp')
        write_centerlines_to_shape(gdirs,
                                   path=fpath,
                                   geometrical_widths_output=True)
        # Salem can't read it
        shp_w = gpd.read_file(fpath)
        self.assertTrue(shp_w is not None)
        shp_w = shp_w.loc[shp_w.RGIID == 'RGI60-11.00897']
        self.assertEqual(len(shp_w), 90)

        fpath = os.path.join(_TEST_DIR, 'widths_corr.shp')
        write_centerlines_to_shape(gdirs,
                                   path=fpath,
                                   corrected_widths_output=True)
        # Salem can't read it
        shp_w = gpd.read_file(fpath)
        self.assertTrue(shp_w is not None)
        shp_w = shp_w.loc[shp_w.RGIID == 'RGI60-11.00897']
        self.assertEqual(len(shp_w), 90)

        # Test that one wrong glacier still works
        base_dir = os.path.join(cfg.PATHS['working_dir'], 'dummy_pergla')
        utils.mkdir(base_dir, reset=True)
        gdirs = workflow.execute_entity_task(utils.copy_to_basedir,
                                             gdirs,
                                             base_dir=base_dir,
                                             setup='all')
        os.remove(gdirs[0].get_filepath('centerlines'))
        cfg.PARAMS['continue_on_error'] = True
        write_centerlines_to_shape(gdirs)
Example #8
    def test_random(self):

        gdirs = up_to_inversion()

        workflow.execute_entity_task(flowline.init_present_time_glacier, gdirs)
        rand_glac = partial(flowline.random_glacier_evolution, nyears=200)
        workflow.execute_entity_task(rand_glac, gdirs)

        for gd in gdirs:

            path = gd.get_filepath('past_model')

            # See that we are running ok
            with flowline.FileModel(path) as model:
                vol = model.volume_km3_ts()
                area = model.area_km2_ts()
                length = model.length_m_ts()

                self.assertTrue(np.all(np.isfinite(vol) & (vol != 0.)))
                self.assertTrue(np.all(np.isfinite(area) & (area != 0.)))
                self.assertTrue(np.all(np.isfinite(length) & (length != 0.)))
Example #9
def hef_gdir(test_dir):
    cfg.initialize(logging_level='CRITICAL')
    cfg.PARAMS['use_multiple_flowlines'] = False
    cfg.PARAMS['use_rgi_area'] = False
    cfg.PATHS['working_dir'] = test_dir

    rgi_ids = ['RGI60-11.00897']
    gl = utils.get_rgi_glacier_entities(rgi_ids)
    gdirs = workflow.init_glacier_directories(gl, from_prepro_level=1)

    global_tasks.gis_prepro_tasks(gdirs)

    cfg.PARAMS['baseline_climate'] = 'CRU'
    global_tasks.climate_tasks(
        gdirs,
        base_url='https://cluster.klima.uni-bremen.de/~oggm/ref_mb_params'
        '/oggm_v1.4/RGIV62/CRU/centerlines/qc3/pcp2.5')

    execute_entity_task(tasks.prepare_for_inversion,
                        gdirs,
                        invert_all_trapezoid=True)
    execute_entity_task(tasks.mass_conservation_inversion, gdirs)

    execute_entity_task(tasks.init_present_time_glacier, gdirs)

    return gdirs[0]
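A minimal pytest wiring sketch for the helper above (the fixture registration and the test body are illustrative assumptions, not from the source):

import pytest

@pytest.fixture(scope='module')
def gdir(test_dir):
    # hef_gdir prepares exactly one glacier (Hintereisferner)
    return hef_gdir(test_dir)

def test_hef_gdir(gdir):
    assert gdir.rgi_id == 'RGI60-11.00897'
    # model_flowlines was written by init_present_time_glacier above
    assert len(gdir.read_pickle('model_flowlines')) >= 1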
Example #10
def up_to_distrib(reset=False):
    # for cross val basically

    gdirs = up_to_climate(reset=reset)

    with open(CLI_LOGF, 'rb') as f:
        clilog = pickle.load(f)

    if clilog != 'cru':
        reset = True
    else:
        try:
            tasks.compute_ref_t_stars(gdirs)
        except Exception:
            reset = True

    if reset:
        # Use CRU
        cfg.PARAMS['prcp_scaling_factor'] = 2.5
        cfg.PARAMS['temp_use_local_gradient'] = False
        cfg.PARAMS['baseline_climate'] = 'CRU'
        with warnings.catch_warnings():
            # There is a warning from salem
            warnings.simplefilter("ignore")
            workflow.execute_entity_task(tasks.process_cru_data, gdirs)
        tasks.compute_ref_t_stars(gdirs)
        workflow.execute_entity_task(tasks.local_t_star, gdirs)
        workflow.execute_entity_task(tasks.mu_star_calibration, gdirs)
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('cru', f)

    return gdirs
Example #11
def up_to_distrib(reset=False):
    # for cross val basically

    gdirs = up_to_climate(reset=reset)

    with open(CLI_LOGF, 'rb') as f:
        clilog = pickle.load(f)

    if clilog != 'cru':
        reset = True
    else:
        try:
            tasks.compute_ref_t_stars(gdirs)
        except Exception:
            reset = True

    if reset:
        # Use CRU
        cfg.PARAMS['prcp_scaling_factor'] = 2.5
        cfg.PARAMS['temp_use_local_gradient'] = False
        cfg.PARAMS['baseline_climate'] = 'CRU'
        cru_dir = get_demo_file('cru_ts3.23.1901.2014.tmp.dat.nc')
        cfg.PATHS['cru_dir'] = os.path.dirname(cru_dir)
        with warnings.catch_warnings():
            # There is a warning from salem
            warnings.simplefilter("ignore")
            workflow.execute_entity_task(tasks.process_cru_data, gdirs)
        tasks.compute_ref_t_stars(gdirs)
        workflow.execute_entity_task(tasks.local_t_star, gdirs)
        workflow.execute_entity_task(tasks.mu_star_calibration, gdirs)
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('cru', f)

    return gdirs
Example #12
def up_to_distrib(reset=False):
    # for cross val basically

    gdirs = up_to_climate(reset=reset)

    with open(CLI_LOGF, 'rb') as f:
        clilog = pickle.load(f)

    if clilog != 'cru':
        reset = True
    else:
        try:
            tasks.compute_ref_t_stars(gdirs)
        except Exception:
            reset = True

    if reset:
        # Use CRU
        cfg.PARAMS['prcp_scaling_factor'] = 2.5
        cfg.PARAMS['temp_use_local_gradient'] = False
        cfg.PARAMS['baseline_climate'] = 'CRU'
        cru_dir = get_demo_file('cru_ts3.23.1901.2014.tmp.dat.nc')
        cfg.PATHS['cru_dir'] = os.path.dirname(cru_dir)
        with warnings.catch_warnings():
            # There is a warning from salem
            warnings.simplefilter("ignore")
            workflow.execute_entity_task(tasks.process_cru_data, gdirs)
        tasks.compute_ref_t_stars(gdirs)
        workflow.execute_entity_task(tasks.local_mustar, gdirs)
        workflow.execute_entity_task(tasks.apparent_mb, gdirs)
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('cru', f)

    return gdirs
Example #13
def process_cmip_for_merged_glacier(gdir, filesuffix, ft, fp):

    rgi = gdir.rgi_id.split('_')[0]

    rgis = merge_pair_dict(rgi)[0] + [rgi]

    gdirs = init_glacier_regions(rgis, prepro_border=10, from_prepro_level=1)
    execute_entity_task(tasks.process_histalp_data, gdirs)

    execute_entity_task(gcm_climate.process_cmip5_data,
                        gdirs,
                        filesuffix=filesuffix,
                        fpath_temp=ft,
                        fpath_precip=fp)

    for gd in gdirs:
        # copy climate files
        shutil.copyfile(
            gd.get_filepath('gcm_data', filesuffix=filesuffix),
            gdir.get_filepath('gcm_data',
                              filesuffix='_{}{}'.format(gd.rgi_id,
                                                        filesuffix)))
Example #14
    def test_dt_per_dt(self, case_dir):

        cfg.initialize()
        cfg.PARAMS['prcp_scaling_factor'] = 1.6
        cfg.PATHS['working_dir'] = case_dir

        # Go - get the pre-processed glacier directories
        gdirs = workflow.init_glacier_directories(
            ['RGI60-11.00897', 'RGI60-03.04384'],
            from_prepro_level=5,
            prepro_base_url=prepro_base_url,
            prepro_border=80,
            prepro_rgi_version='62')

        workflow.execute_entity_task(parse_dt_per_dt, gdirs)

        dt1 = gdirs[0].get_diagnostics()['magicc_dt_per_dt']
        dt2 = gdirs[1].get_diagnostics()['magicc_dt_per_dt']
        assert dt1 < 0.7 * dt2

        dp1 = gdirs[0].get_diagnostics()['magicc_dp_per_dt']
        dp2 = gdirs[1].get_diagnostics()['magicc_dp_per_dt']
        assert dp1 < dp2
        assert dp2 < 0.2

        workflow.execute_entity_task(parse_dt_per_dt, gdirs, monthly=True)

        dt1m = np.asarray(gdirs[0].get_diagnostics()['magicc_dt_per_dt'])
        dt2m = np.asarray(gdirs[1].get_diagnostics()['magicc_dt_per_dt'])
        assert_allclose(dt1m.mean(), dt1, atol=1e-3)
        assert_allclose(dt2m.mean(), dt2, atol=1e-3)

        dp1m = np.asarray(gdirs[0].get_diagnostics()['magicc_dp_per_dt'])
        dp2m = np.asarray(gdirs[1].get_diagnostics()['magicc_dp_per_dt'])
        # This is not always so the same because of averaging months with
        # different precips
        assert_allclose(dp1m.mean(), dp1, atol=3e-2)
        assert_allclose(dp2m.mean(), dp2, atol=3e-2)
Example #15
    def test_varying_bias_workflow(self, case_dir):

        cfg.initialize()
        cfg.PARAMS['prcp_scaling_factor'] = 1.6
        cfg.PATHS['working_dir'] = case_dir
        cfg.PARAMS['use_multiprocessing'] = True

        # Go - get the pre-processed glacier directories
        gdirs = workflow.init_glacier_directories(
            ['RGI60-11.00897'],
            from_prepro_level=5,
            prepro_base_url=prepro_base_url,
            prepro_border=80,
            prepro_rgi_version='62')
        workflow.execute_entity_task(parse_dt_per_dt, gdirs)
        gdir = gdirs[0]

        exp = 'netzero_py2020_fac1.0_decr0.3'
        magicc_file = magicc_dir + exp + '.nc'
        with xr.open_dataset(utils.file_downloader(magicc_file),
                             decode_times=False) as ds:
            ds = ds.load()
        df = ds['ens_avg'].to_dataframe()

        run_from_magicc_data(gdir,
                             magicc_ts=df['ens_avg'],
                             init_model_filesuffix='_historical',
                             output_filesuffix='_' + exp)

        with xr.open_dataset(
                gdir.get_filepath('model_diagnostics',
                                  filesuffix='_' + exp)) as ds:
            df['vol'] = ds.volume_m3.to_series()
            assert ds.time[0] == 2009
            assert ds.time[-1] == 2301
            assert ds.volume_m3.isel(time=0) > ds.volume_m3.isel(time=-1)
            assert ds.volume_m3.min() < ds.volume_m3.isel(time=-1)
Example #16
def compute_ref_t_stars(gdirs):
    """ Detects the best t* for the reference glaciers and writes them to disk

    This task will be needed for mass balance calibration of custom climate
    data. For CRU and HISTALP baseline climate a precalibrated list is
    available and should be used instead.

    Parameters
    ----------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        will be filtered for reference glaciers
    """

    if not cfg.PARAMS['run_mb_calibration']:
        raise InvalidParamsError('Are you sure you want to calibrate the '
                                 'reference t*? There is a pre-calibrated '
                                 'version available. If you know what you are '
                                 'doing and still want to calibrate, set the '
                                 '`run_mb_calibration` parameter to `True`.')

    log.info('Compute the reference t* and mu* for WGMS glaciers')

    # Reference glaciers only if in the list and period is good
    ref_gdirs = utils.get_ref_mb_glaciers(gdirs)

    # Run
    from oggm.workflow import execute_entity_task
    out = execute_entity_task(t_star_from_refmb, ref_gdirs)

    # Loop write
    df = pd.DataFrame()
    for gdir, res in zip(ref_gdirs, out):
        # list of mus compatibles with refmb
        rid = gdir.rgi_id
        df.loc[rid, 'lon'] = gdir.cenlon
        df.loc[rid, 'lat'] = gdir.cenlat
        df.loc[rid, 'n_mb_years'] = len(gdir.get_ref_mb_data())
        df.loc[rid, 'tstar'] = res['t_star']
        df.loc[rid, 'bias'] = res['bias']

    # Write out
    df['tstar'] = df['tstar'].astype(int)
    df['n_mb_years'] = df['n_mb_years'].astype(int)
    file = os.path.join(cfg.PATHS['working_dir'], 'ref_tstars.csv')
    df.sort_index().to_csv(file)
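A minimal calling sketch for the task above (assumes `gdirs` is an already prepared list containing reference glaciers, and that the working directory is set as in the other examples):

import os
import pandas as pd
from oggm import cfg

cfg.PARAMS['run_mb_calibration'] = True  # required by the guard above
compute_ref_t_stars(gdirs)

# The task writes one row per reference glacier to the working directory
fpath = os.path.join(cfg.PATHS['working_dir'], 'ref_tstars.csv')
print(pd.read_csv(fpath, index_col=0)[['tstar', 'bias']].head())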
Example #17
    def test_workflow(self):

        # This is a check that the inversion workflow works fine

        # Download the RGI file for the run
        # Make a new dataframe of those
        rgidf = gpd.read_file(get_demo_file('SouthGlacier.shp'))

        # Go - initialize working directories
        gdirs = workflow.init_glacier_regions(rgidf)

        # Preprocessing tasks
        task_list = [
            tasks.glacier_masks,
            tasks.compute_centerlines,
            tasks.initialize_flowlines,
            tasks.catchment_area,
            tasks.catchment_intersections,
            tasks.catchment_width_geom,
            tasks.catchment_width_correction,
            tasks.process_cru_data,
            tasks.local_mustar,
            tasks.apparent_mb,
        ]
        for task in task_list:
            execute_entity_task(task, gdirs)

        # Inversion tasks
        execute_entity_task(tasks.prepare_for_inversion, gdirs)
        # We use the default parameters for this run
        execute_entity_task(tasks.mass_conservation_inversion, gdirs)
        execute_entity_task(tasks.filter_inversion_output, gdirs)

        df = utils.glacier_characteristics(gdirs)
        assert df.inv_thickness_m[0] < 100

        if do_plot:
            import matplotlib.pyplot as plt
            from oggm.graphics import plot_inversion
            plot_inversion(gdirs)
            plt.show()
Example #18
    def test_workflow(self):

        # This is a check that the inversion workflow works fine

        # Download the RGI file for the run
        # Make a new dataframe of those
        rgidf = gpd.read_file(get_demo_file('SouthGlacier.shp'))

        # Go - initialize working directories
        gdirs = workflow.init_glacier_regions(rgidf)

        # Preprocessing tasks
        task_list = [
            tasks.glacier_masks,
            tasks.compute_centerlines,
            tasks.initialize_flowlines,
            tasks.catchment_area,
            tasks.catchment_intersections,
            tasks.catchment_width_geom,
            tasks.catchment_width_correction,
            tasks.process_cru_data,
            tasks.local_t_star,
            tasks.mu_star_calibration,
        ]
        for task in task_list:
            execute_entity_task(task, gdirs)

        # Inversion tasks
        execute_entity_task(tasks.prepare_for_inversion, gdirs)
        # We use the default parameters for this run
        execute_entity_task(tasks.mass_conservation_inversion, gdirs)
        execute_entity_task(tasks.filter_inversion_output, gdirs)

        df = utils.compile_glacier_statistics(gdirs)
        assert df.inv_thickness_m[0] < 100

        if do_plot:
            import matplotlib.pyplot as plt
            from oggm.graphics import plot_inversion
            plot_inversion(gdirs)
            plt.show()
Example #19
def test_chhota_shigri():

    testdir = os.path.join(get_test_dir(), 'tmp_chhota')
    utils.mkdir(testdir, reset=True)

    # Init
    cfg.initialize()
    cfg.PATHS['dem_file'] = get_demo_file('dem_chhota_shigri.tif')
    cfg.PARAMS['border'] = 80
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['working_dir'] = testdir

    hef_file = get_demo_file('divides_RGI50-14.15990.shp')
    df = gpd.read_file(hef_file)
    df['Area'] = df.Area * 1e-6  # cause it was in m2
    df['RGIId'] = ['RGI50-14.15990' + d for d in ['_d01', '_d02']]

    gdirs = workflow.init_glacier_regions(df)
    workflow.gis_prepro_tasks(gdirs)
    for gdir in gdirs:
        climate.apparent_mb_from_linear_mb(gdir)
    workflow.execute_entity_task(inversion.prepare_for_inversion, gdirs)
    workflow.execute_entity_task(inversion.volume_inversion,
                                 gdirs,
                                 glen_a=cfg.A,
                                 fs=0)
    workflow.execute_entity_task(inversion.filter_inversion_output, gdirs)
    workflow.execute_entity_task(flowline.init_present_time_glacier, gdirs)

    models = []
    for gdir in gdirs:
        flowline.init_present_time_glacier(gdir)
        fls = gdir.read_pickle('model_flowlines')
        models.append(flowline.FlowlineModel(fls))

    fig, ax = plt.subplots()
    graphics.plot_modeloutput_map(gdirs, ax=ax, model=models)
    fig.tight_layout()
    shutil.rmtree(testdir)
    return fig
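Since the function returns the figure, a caller can persist it; a trivial usage sketch (the output filename is a placeholder):

fig = test_chhota_shigri()
fig.savefig('chhota_shigri_inversion.png', dpi=150)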
Example #20
def calibration(gdirs, xval, major=0):
    # Climate tasks

    if mbcfg.PARAMS['histalp']:
        cfg.PATHS['climate_file'] = mbcfg.PATHS['histalpfile']
        execute_entity_task(tasks.process_custom_climate_data, gdirs)
    else:
        execute_entity_task(tasks.process_cru_data, gdirs)

    with utils.DisableLogger():
        tasks.compute_ref_t_stars(gdirs)
        tasks.distribute_t_stars(gdirs)
        execute_entity_task(tasks.apparent_mb, gdirs)

    # do the crossvalidation
    xval = quick_crossval(gdirs, xval, major=major)

    return xval
Example #21
cfg.PARAMS['border'] = 160

# Set to True for operational runs
cfg.PARAMS['continue_on_error'] = True
cfg.PARAMS['auto_skip_task'] = False

# Get the RGI file
rgidf = gpd.read_file(utils.get_rgi_region_file(rgi_reg, version=rgi_version))

# Sort for more efficient parallel computing
rgidf = rgidf.sort_values('Area', ascending=False)

# Module logger
log = logging.getLogger(__name__)
log.info('Starting run for RGI reg: ' + rgi_reg)
log.info('Number of glaciers: {}'.format(len(rgidf)))

# Go - initialize working directories
gdirs = workflow.init_glacier_regions(rgidf, from_prepro_level=0)

# Tasks
workflow.execute_entity_task(tasks.glacier_masks, gdirs)

# End - compress all
workflow.execute_entity_task(utils.gdir_to_tar, gdirs)

# Log
m, s = divmod(time.time() - start, 60)
h, m = divmod(m, 60)
log.info("OGGM is done! Time needed: %02d:%02d:%02d" % (h, m, s))
Example #22
# Set to True for operational runs
cfg.PARAMS['continue_on_error'] = True
cfg.PARAMS['auto_skip_task'] = False

# We use intersects
rgif = utils.get_rgi_intersects_region_file(rgi_reg, version=rgi_version)
cfg.set_intersects_db(rgif)

# Get the RGI file
rgidf = gpd.read_file(utils.get_rgi_region_file(rgi_reg, version=rgi_version))

# Sort for more efficient parallel computing
rgidf = rgidf.sort_values('Area', ascending=False)

# Module logger
log = logging.getLogger(__name__)
log.info('Starting run for RGI reg: ' + rgi_reg)
log.info('Number of glaciers: {}'.format(len(rgidf)))

# Go - initialize working directories
gdirs = workflow.init_glacier_regions(rgidf)

# End - compress all
workflow.execute_entity_task(utils.gdir_to_tar, gdirs)

# Log
m, s = divmod(time.time() - start, 60)
h, m = divmod(m, 60)
log.info("OGGM is done! Time needed: %02d:%02d:%02d" % (h, m, s))
Example #23
    def test_random(self):

        # Fake Reset (all these tests are horribly coded)
        if not os.path.exists(TEST_DIR):
            os.makedirs(TEST_DIR)
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('none', f)
        gdirs = up_to_inversion(reset=False)

        # First tests
        df = utils.compile_glacier_statistics(gdirs)
        df['volume_before_calving_km3'] = df['volume_before_calving'] * 1e-9
        assert np.sum(~df.volume_before_calving.isnull()) == 2
        dfs = df.iloc[:2]
        assert np.all(dfs['volume_before_calving_km3'] < dfs['inv_volume_km3'])
        assert_allclose(df['inv_flowline_glacier_area'] * 1e-6,
                        df['rgi_area_km2'])

        workflow.execute_entity_task(flowline.init_present_time_glacier, gdirs)
        # Check init_present_time_glacier not messing around too much
        for gd in gdirs:
            from oggm.core.massbalance import LinearMassBalance
            from oggm.core.flowline import FluxBasedModel
            mb_mod = LinearMassBalance(ela_h=2500)
            fls = gd.read_pickle('model_flowlines')
            model = FluxBasedModel(fls, mb_model=mb_mod)
            df.loc[gd.rgi_id, 'start_area_km2'] = model.area_km2
            df.loc[gd.rgi_id, 'start_volume_km3'] = model.volume_km3
            df.loc[gd.rgi_id, 'start_length'] = model.length_m
        assert_allclose(df['rgi_area_km2'], df['start_area_km2'], rtol=0.01)
        assert_allclose(df['rgi_area_km2'].sum(),
                        df['start_area_km2'].sum(),
                        rtol=0.005)
        assert_allclose(df['inv_volume_km3'], df['start_volume_km3'])
        assert_allclose(df['inv_volume_km3'].sum(),
                        df['start_volume_km3'].sum())
        assert_allclose(df['main_flowline_length'], df['start_length'])

        workflow.execute_entity_task(flowline.run_random_climate,
                                     gdirs,
                                     nyears=100,
                                     seed=0,
                                     store_monthly_step=True,
                                     output_filesuffix='_test')

        for gd in gdirs:
            path = gd.get_filepath('model_run', filesuffix='_test')
            # See that we are running ok
            with flowline.FileModel(path) as model:
                vol = model.volume_km3_ts()
                area = model.area_km2_ts()
                length = model.length_m_ts()

                self.assertTrue(np.all(np.isfinite(vol) & (vol != 0.)))
                self.assertTrue(np.all(np.isfinite(area) & (area != 0.)))
                self.assertTrue(np.all(np.isfinite(length) & (length != 0.)))

            ds_diag = gd.get_filepath('model_diagnostics', filesuffix='_test')
            ds_diag = xr.open_dataset(ds_diag)
            df = vol.to_frame('RUN')
            df['DIAG'] = ds_diag.volume_m3.to_series() * 1e-9
            assert_allclose(df.RUN, df.DIAG)
            df = area.to_frame('RUN')
            df['DIAG'] = ds_diag.area_m2.to_series() * 1e-6
            assert_allclose(df.RUN, df.DIAG)
            df = length.to_frame('RUN')
            df['DIAG'] = ds_diag.length_m.to_series()
            assert_allclose(df.RUN, df.DIAG)

        # Test output
        ds = utils.compile_run_output(gdirs, input_filesuffix='_test')
        assert_allclose(ds_diag.volume_m3, ds.volume.sel(rgi_id=gd.rgi_id))
        assert_allclose(ds_diag.area_m2, ds.area.sel(rgi_id=gd.rgi_id))
        assert_allclose(ds_diag.length_m, ds.length.sel(rgi_id=gd.rgi_id))
        df = ds.volume.sel(rgi_id=gd.rgi_id).to_series().to_frame('OUT')
        df['RUN'] = ds_diag.volume_m3.to_series()
        assert_allclose(df.RUN, df.OUT)

        # Compare to statistics
        df = utils.compile_glacier_statistics(gdirs)
        df['y0_vol'] = ds.volume.sel(rgi_id=df.index, time=0) * 1e-9
        df['y0_area'] = ds.area.sel(rgi_id=df.index, time=0) * 1e-6
        df['y0_len'] = ds.length.sel(rgi_id=df.index, time=0)
        assert_allclose(df['rgi_area_km2'], df['y0_area'], 0.06)
        assert_allclose(df['inv_volume_km3'], df['y0_vol'], 0.04)
        assert_allclose(df['main_flowline_length'], df['y0_len'])

        # Calving stuff
        assert ds.isel(rgi_id=0).calving[-1] > 0
        assert ds.isel(rgi_id=0).calving_rate[-1] > 0
        assert ds.isel(rgi_id=0).volume_bsl[-1] == 0
        assert ds.isel(rgi_id=0).volume_bwl[-1] > 0
        assert ds.isel(rgi_id=1).calving[-1] > 0
        assert ds.isel(rgi_id=1).calving_rate[-1] > 0
        assert not np.isfinite(ds.isel(rgi_id=1).volume_bsl[-1])
Example #24
    def test_mb(self):

        # This is a function to produce the MB function needed by Anna

        # Download the RGI file for the run
        # Make a new dataframe of those
        rgidf = gpd.read_file(get_demo_file('SouthGlacier.shp'))

        # Go - initialize working directories
        gdirs = workflow.init_glacier_regions(rgidf)

        # Preprocessing tasks
        task_list = [
            tasks.glacier_masks,
            tasks.compute_centerlines,
            tasks.initialize_flowlines,
            tasks.catchment_area,
            tasks.catchment_intersections,
            tasks.catchment_width_geom,
            tasks.catchment_width_correction,
        ]
        for task in task_list:
            execute_entity_task(task, gdirs)

        # Climate tasks -- only data IO and tstar interpolation!
        execute_entity_task(tasks.process_cru_data, gdirs)
        tasks.distribute_t_stars(gdirs)
        execute_entity_task(tasks.apparent_mb, gdirs)

        mbref = salem.GeoTiff(get_demo_file('mb_SouthGlacier.tif'))
        demref = salem.GeoTiff(get_demo_file('dem_SouthGlacier.tif'))

        mbref = mbref.get_vardata()
        mbref[mbref == -9999] = np.NaN
        demref = demref.get_vardata()[np.isfinite(mbref)]
        mbref = mbref[np.isfinite(mbref)] * 1000

        # compute the bias to make it 0 SMB on the 2D DEM
        mbmod = ConstantMassBalance(gdirs[0], bias=0)
        mymb = mbmod.get_annual_mb(demref) * cfg.SEC_IN_YEAR * cfg.RHO
        mbmod = ConstantMassBalance(gdirs[0], bias=np.average(mymb))
        mymb = mbmod.get_annual_mb(demref) * cfg.SEC_IN_YEAR * cfg.RHO
        np.testing.assert_allclose(np.average(mymb), 0., atol=1e-3)

        # Same for ref
        mbref = mbref - np.average(mbref)
        np.testing.assert_allclose(np.average(mbref), 0., atol=1e-3)

        # Fit poly
        p = np.polyfit(demref, mbref, deg=2)
        poly = np.poly1d(p)
        myfit = poly(demref)
        np.testing.assert_allclose(np.average(myfit), 0., atol=1e-3)

        if do_plot:
            import matplotlib.pyplot as plt
            plt.scatter(mbref, demref, s=5, label='Obs (2007-2012), shifted to '
                                                   'Avg(SMB) = 0')
            plt.scatter(mymb, demref, s=5, label='OGGM MB at t*')
            plt.scatter(myfit, demref, s=5, label='Polyfit', c='C3')
            plt.xlabel('MB (mm w.e yr-1)')
            plt.ylabel('Altitude (m)')
            plt.legend()
            plt.show()
Example #25
def run_prepro_levels(rgi_version=None,
                      rgi_reg=None,
                      border=None,
                      output_folder='',
                      working_dir='',
                      dem_source='',
                      is_test=False,
                      demo=False,
                      test_rgidf=None,
                      test_intersects_file=None,
                      test_topofile=None,
                      test_crudir=None,
                      disable_mp=False,
                      timeout=0,
                      max_level=4,
                      logging_level='WORKFLOW'):
    """Does the actual job.

    Parameters
    ----------
    rgi_version : str
        the RGI version to use (defaults to cfg.PARAMS)
    rgi_reg : str
        the RGI region to process
    border : int
        the number of pixels at the maps border
    output_folder : str
        path to the output folder (where to put the preprocessed tar files)
    dem_source : str
        which DEM source to use: default, SOURCE_NAME or ALL
    working_dir : str
        path to the OGGM working directory
    is_test : bool
        to test on a couple of glaciers only!
    demo : bool
        to run the prepro for the list of demo glaciers
    test_rgidf : shapefile
        for testing purposes only
    test_intersects_file : shapefile
        for testing purposes only
    test_topofile : str
        for testing purposes only
    test_crudir : str
        for testing purposes only
    disable_mp : bool
        disable multiprocessing
    max_level : int
        the maximum pre-processing level before stopping
    logging_level : str
        the logging level to use (DEBUG, INFO, WARNING, WORKFLOW)
    """

    # TODO: temporarily silence Fiona deprecation warnings
    import warnings
    warnings.filterwarnings("ignore", category=DeprecationWarning)

    # Input check
    if max_level not in [1, 2, 3, 4]:
        raise InvalidParamsError('max_level should be one of [1, 2, 3, 4]')

    # Time
    start = time.time()

    def _time_log():
        # Log util
        m, s = divmod(time.time() - start, 60)
        h, m = divmod(m, 60)
        log.workflow('OGGM prepro_levels is done! Time needed: '
                     '{:02d}:{:02d}:{:02d}'.format(int(h), int(m), int(s)))

    # Initialize OGGM and set up the run parameters
    cfg.initialize(logging_level=logging_level)

    # Local paths
    utils.mkdir(working_dir)
    cfg.PATHS['working_dir'] = working_dir

    # Use multiprocessing?
    cfg.PARAMS['use_multiprocessing'] = not disable_mp

    # How many grid points around the glacier?
    # Make it large if you expect your glaciers to grow large
    cfg.PARAMS['border'] = border

    # Set to True for operational runs
    cfg.PARAMS['continue_on_error'] = True

    # Timeout
    cfg.PARAMS['task_timeout'] = timeout

    # For statistics
    climate_periods = [1920, 1960, 2000]

    if rgi_version is None:
        rgi_version = cfg.PARAMS['rgi_version']
    rgi_dir_name = 'RGI{}'.format(rgi_version)
    border_dir_name = 'b_{:03d}'.format(border)
    base_dir = os.path.join(output_folder, rgi_dir_name, border_dir_name)

    # Add a package version file
    utils.mkdir(base_dir)
    opath = os.path.join(base_dir, 'package_versions.txt')
    with open(opath, 'w') as vfile:
        vfile.write(utils.show_versions(logger=log))

    if demo:
        rgidf = utils.get_rgi_glacier_entities(cfg.DATA['demo_glaciers'].index)
    elif test_rgidf is None:
        # Get the RGI file
        rgidf = gpd.read_file(
            utils.get_rgi_region_file(rgi_reg, version=rgi_version))
        # We use intersects
        rgif = utils.get_rgi_intersects_region_file(rgi_reg,
                                                    version=rgi_version)
        cfg.set_intersects_db(rgif)
    else:
        rgidf = test_rgidf
        cfg.set_intersects_db(test_intersects_file)

    if is_test:
        # Just for fun
        rgidf = rgidf.sample(4)

    # Sort for more efficient parallel computing
    rgidf = rgidf.sort_values('Area', ascending=False)

    log.workflow('Starting prepro run for RGI reg: {} '
                 'and border: {}'.format(rgi_reg, border))
    log.workflow('Number of glaciers: {}'.format(len(rgidf)))

    # Input
    if test_topofile:
        cfg.PATHS['dem_file'] = test_topofile

    # L1 - initialize working directories
    # Which DEM source?
    if dem_source.upper() == 'ALL':
        # This is the complex one, just do the job and leave
        log.workflow('Running prepro on ALL sources')
        for i, s in enumerate(utils.DEM_SOURCES):
            rs = i == 0
            rgidf['DEM_SOURCE'] = s
            log.workflow('Running prepro on sources: {}'.format(s))
            gdirs = workflow.init_glacier_regions(rgidf, reset=rs, force=rs)
            workflow.execute_entity_task(_rename_dem_folder, gdirs, source=s)

        # Compress all in output directory
        l_base_dir = os.path.join(base_dir, 'L1')
        workflow.execute_entity_task(utils.gdir_to_tar,
                                     gdirs,
                                     delete=False,
                                     base_dir=l_base_dir)
        utils.base_dir_to_tar(l_base_dir)

        _time_log()
        return

    if dem_source:
        # Force a given source
        rgidf['DEM_SOURCE'] = dem_source.upper()

    # L1 - go
    gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L1', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # L1 OK - compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L1')
    workflow.execute_entity_task(utils.gdir_to_tar,
                                 gdirs,
                                 delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)
    if max_level == 1:
        _time_log()
        return

    # L2 - Tasks
    # Pre-download other files just in case
    if test_crudir is None:
        _ = utils.get_cru_file(var='tmp')
        _ = utils.get_cru_file(var='pre')
    else:
        cfg.PATHS['cru_dir'] = test_crudir

    workflow.execute_entity_task(tasks.process_cru_data, gdirs)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L2', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # L2 OK - compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L2')
    workflow.execute_entity_task(utils.gdir_to_tar,
                                 gdirs,
                                 delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)
    if max_level == 2:
        _time_log()
        return

    # L3 - Tasks
    task_list = [
        tasks.glacier_masks, tasks.compute_centerlines,
        tasks.initialize_flowlines, tasks.compute_downstream_line,
        tasks.compute_downstream_bedshape, tasks.catchment_area,
        tasks.catchment_intersections, tasks.catchment_width_geom,
        tasks.catchment_width_correction, tasks.local_t_star,
        tasks.mu_star_calibration, tasks.prepare_for_inversion,
        tasks.mass_conservation_inversion, tasks.filter_inversion_output,
        tasks.init_present_time_glacier
    ]
    for task in task_list:
        workflow.execute_entity_task(task, gdirs)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L3', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)
    opath = os.path.join(sum_dir, 'climate_statistics_{}.csv'.format(rgi_reg))
    utils.compile_climate_statistics(gdirs,
                                     add_climate_period=climate_periods,
                                     path=opath)

    # L3 OK - compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L3')
    workflow.execute_entity_task(utils.gdir_to_tar,
                                 gdirs,
                                 delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)
    if max_level == 3:
        _time_log()
        return

    # L4 - No tasks: add some stats for consistency and make the dirs small
    sum_dir = os.path.join(base_dir, 'L4', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # Copy mini data to new dir
    base_dir = os.path.join(base_dir, 'L4')
    mini_gdirs = workflow.execute_entity_task(tasks.copy_to_basedir,
                                              gdirs,
                                              base_dir=base_dir)

    # L4 OK - compress all in output directory
    workflow.execute_entity_task(utils.gdir_to_tar, mini_gdirs, delete=True)
    utils.base_dir_to_tar(base_dir)

    _time_log()
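A hypothetical invocation of `run_prepro_levels` (directory paths are placeholders; `is_test=True` samples four glaciers, as in the body above):

run_prepro_levels(rgi_version='61', rgi_reg='11', border=80,
                  output_folder='/path/to/prepro_output',
                  working_dir='/path/to/workdir',
                  is_test=True, max_level=2)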
Example #26
def up_to_inversion(reset=False):
    """Run the tasks you want."""

    # test directory
    if not os.path.exists(TEST_DIR):
        os.makedirs(TEST_DIR)
    if reset:
        clean_dir(TEST_DIR)

    # Init
    cfg.initialize()

    # Use multiprocessing
    cfg.PARAMS['use_multiprocessing'] = not ON_TRAVIS

    # Working dir
    cfg.PATHS['working_dir'] = TEST_DIR

    cfg.PATHS['dem_file'] = get_demo_file('srtm_oetztal.tif')

    # Set up the paths and other stuffs
    cfg.set_divides_db(get_demo_file('divides_workflow.shp'))
    cfg.PATHS['wgms_rgi_links'] = get_demo_file('RGI_WGMS_oetztal.csv')
    cfg.PATHS['glathida_rgi_links'] = get_demo_file('RGI_GLATHIDA_oetztal.csv')

    # Read in the RGI file
    rgi_file = get_demo_file('rgi_oetztal.shp')
    rgidf = gpd.GeoDataFrame.from_file(rgi_file)

    # Be sure data is downloaded because lock doesn't work
    cl = utils.get_cru_cl_file()

    # Params
    cfg.PARAMS['border'] = 70
    cfg.PARAMS['use_optimized_inversion_params'] = True

    # Go
    gdirs = workflow.init_glacier_regions(rgidf)

    try:
        flowline.init_present_time_glacier(gdirs[0])
    except Exception:
        reset = True

    if reset:
        # First preprocessing tasks
        workflow.gis_prepro_tasks(gdirs)

        # Climate related tasks
        # See if CRU is running
        cfg.PARAMS['temp_use_local_gradient'] = False
        cfg.PATHS['climate_file'] = '~'
        cru_dir = get_demo_file('cru_ts3.23.1901.2014.tmp.dat.nc')
        cfg.PATHS['cru_dir'] = os.path.dirname(cru_dir)
        with warnings.catch_warnings():
            # There is a warning from salem
            warnings.simplefilter("ignore")
            workflow.execute_entity_task(tasks.distribute_cru_style, gdirs)
        tasks.compute_ref_t_stars(gdirs)
        tasks.distribute_t_stars(gdirs)

        # Use histalp for the actual test
        cfg.PARAMS['temp_use_local_gradient'] = True
        cfg.PATHS['climate_file'] = get_demo_file('HISTALP_oetztal.nc')
        cfg.PATHS['cru_dir'] = '~'
        workflow.climate_tasks(gdirs)

        # Inversion
        workflow.inversion_tasks(gdirs)

    return gdirs
Example #27
# Go - initialize working directories
# gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
gdirs = workflow.init_glacier_regions(rgidf)

# Prepro tasks
task_list = [
    tasks.glacier_masks,
    tasks.compute_centerlines,
    tasks.compute_downstream_lines,
    tasks.catchment_area,
    tasks.initialize_flowlines,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction
]
for task in task_list:
    execute_entity_task(task, gdirs)

# Climate related tasks - this will download
tasks.distribute_climate_data(gdirs)
tasks.compute_ref_t_stars(gdirs)
tasks.distribute_t_stars(gdirs)

# Inversion
execute_entity_task(tasks.prepare_for_inversion, gdirs)
tasks.optimize_inversion_params(gdirs)
execute_entity_task(tasks.volume_inversion, gdirs)

# Write out glacier statistics
df = utils.glacier_characteristics(gdirs)
fpath = os.path.join(cfg.PATHS['working_dir'], 'glacier_char.csv')
df.to_csv(fpath)
Example #28
        df1 = gcms.loc[gcms.gcm == gcm]
        for ssp in df1.ssp.unique():
            df2 = df1.loc[df1.ssp == ssp]
            assert len(df2) == 2
            ft = df2.loc[df2['var'] == 'tas'].iloc[0]
            fp = df2.loc[df2['var'] == 'pr'].iloc[0].path
            rid = ft.fname.replace('_r1i1p1f1_tas.nc', '')
            ft = ft.path

            log.workflow('Starting run for {}'.format(rid))

            workflow.execute_entity_task(
                gcm_climate.process_cmip_data,
                gdirs,
                filesuffix='_' + rid,
                # recognize the climate file for later
                fpath_temp=ft,
                # temperature projections
                fpath_precip=fp,  # precip projections
                year_range=('1981', '2018'),
            )
            workflow.execute_entity_task(
                tasks.run_from_climate_data,
                gdirs,
                climate_filename='gcm_data',
                # use gcm_data, not climate_historical
                climate_input_filesuffix='_' + rid,
                # use a different scenario
                init_model_filesuffix='_historical',
                # this is important! Start from 2019 glacier
                output_filesuffix=rid,
                # recognize the run for later
            )
Example #29
glac_type = [0, 2]
keep_glactype = [(i not in glac_type) for i in rgidf.TermType]
rgidf = rgidf.iloc[keep_glactype]

# Sort for more efficient parallel computing
rgidf = rgidf.sort_values('Area', ascending=False)

log.info('Starting run for RGI reg: ' + rgi_region)
log.info('Number of glaciers: {}'.format(len(rgidf)))

# Go - initialize working directories
# -----------------------------------
gdirs = workflow.init_glacier_regions(rgidf)

if RUN_GIS_mask:
    execute_entity_task(tasks.glacier_masks, gdirs)

#We copy Columbia glacier dir with the itmix dem
shutil.rmtree(
    os.path.join(WORKING_DIR,
                 'per_glacier/RGI60-01/RGI60-01.10/RGI60-01.10689'))
shutil.copytree(
    Columbia_dir,
    os.path.join(WORKING_DIR,
                 'per_glacier/RGI60-01/RGI60-01.10/RGI60-01.10689'))

# Pre-processing tasks
task_list = [
    tasks.compute_centerlines,
    tasks.initialize_flowlines,
    tasks.catchment_area,
Example #30
rgidf = rgidf.loc[in_bas]

# Sort for more efficient parallel computing
rgidf = rgidf.sort_values('Area', ascending=False)

log.workflow('Starting OGGM run')
log.workflow('Number of glaciers: {}'.format(len(rgidf)))

# Go - get the pre-processed glacier directories
gdirs = workflow.init_glacier_regions(rgidf, from_prepro_level=4)

# We can step directly to a new experiment!
# Random climate representative for the recent climate (1985-2015)
# This is a kind of "commitment" run
workflow.execute_entity_task(tasks.run_random_climate, gdirs,
                             nyears=300, y0=2000, seed=1,
                             output_filesuffix='_commitment')
# Now we add a positive and a negative bias to the random temperature series
workflow.execute_entity_task(tasks.run_random_climate, gdirs,
                             nyears=300, y0=2000, seed=2,
                             temperature_bias=0.5,
                             output_filesuffix='_bias_p')
workflow.execute_entity_task(tasks.run_random_climate, gdirs,
                             nyears=300, y0=2000, seed=3,
                             temperature_bias=-0.5,
                             output_filesuffix='_bias_m')

# Write the compiled output
utils.compile_glacier_statistics(gdirs)
utils.compile_run_output(gdirs, filesuffix='_commitment')
utils.compile_run_output(gdirs, filesuffix='_bias_p')
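`compile_run_output` also returns the compiled xarray dataset, so the three experiments can be compared directly; a short sketch (assuming the runs above completed):

for suffix in ['_commitment', '_bias_p', '_bias_m']:
    ds = utils.compile_run_output(gdirs, filesuffix=suffix)
    # ds.volume is in m3, per glacier (rgi_id) and year (time)
    total_vol_km3 = ds.volume.sum(dim='rgi_id') * 1e-9
    print(suffix, float(total_vol_km3.isel(time=-1)))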
Example #31
# gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
gdirs = workflow.init_glacier_regions(rgidf)

# Prepro tasks
task_list = [
    tasks.glacier_masks,
    tasks.compute_centerlines,
    tasks.compute_downstream_lines,
    tasks.catchment_area,
    tasks.initialize_flowlines,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction
]
if RUN_GIS_PREPRO:
    for task in task_list:
        execute_entity_task(task, gdirs)

if RUN_CLIMATE_PREPRO:
    # Climate related tasks
    # see if we can distribute
    workflow.execute_entity_task(tasks.distribute_cru_style, gdirs)
    tasks.compute_ref_t_stars(gdirs)
    tasks.distribute_t_stars(gdirs)

if RUN_INVERSION:
    # Inversion
    execute_entity_task(tasks.prepare_for_inversion, gdirs)
    tasks.optimize_inversion_params(gdirs)
    execute_entity_task(tasks.volume_inversion, gdirs)

if RUN_DYNAMICS:
Example #32
# Default parameters
# Deformation: from Cuffey and Patterson 2010
glen_a = 2.4e-24
# Sliding: from Oerlemans 1997
fs = 5.7e-20

# Correction factors
factors = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1]
factors += [1.1, 1.2, 1.3, 1.5, 1.7, 2, 2.5, 3, 4, 5]
factors += [6, 7, 8, 9, 10]

# Run the inversions tasks with the given factors
for f in factors:
    # Without sliding
    suf = '_{:03d}_without_fs'.format(int(f * 10))
    workflow.execute_entity_task(tasks.mass_conservation_inversion, gdirs,
                                 glen_a=glen_a*f, fs=0)
    workflow.execute_entity_task(tasks.filter_inversion_output, gdirs)
    # Store the results of the inversion only
    utils.compile_glacier_statistics(gdirs, filesuffix=suf,
                                     inversion_only=True)

    # With sliding
    suf = '_{:03d}_with_fs'.format(int(f * 10))
    workflow.execute_entity_task(tasks.mass_conservation_inversion, gdirs,
                                 glen_a=glen_a*f, fs=fs)
    workflow.execute_entity_task(tasks.filter_inversion_output, gdirs)
    # Store the results of the inversion only
    utils.compile_glacier_statistics(gdirs, filesuffix=suf,
                                     inversion_only=True)

# Log
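A follow-up sketch for the factor experiment above (assumes `compile_glacier_statistics` wrote `glacier_statistics<suffix>.csv` files into the working directory, as its `filesuffix` argument suggests):

import os
import pandas as pd
from oggm import cfg

# Total inverted volume as a function of the Glen A correction factor
vols = {}
for f in factors:
    suf = '_{:03d}_without_fs'.format(int(f * 10))
    fpath = os.path.join(cfg.PATHS['working_dir'],
                         'glacier_statistics{}.csv'.format(suf))
    vols[f] = pd.read_csv(fpath, index_col=0)['inv_volume_km3'].sum()
print(pd.Series(vols).sort_index())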
Example #33
    rids = [rid for rid in rids if '-11.' in rid]

# Make a new dataframe with those (this takes a while)
log.info('Reading the RGI shapefiles...')
rgidf = utils.get_rgi_glacier_entities(rids, version=rgi_version)
log.info('For RGIV{} we have {} candidate reference '
         'glaciers.'.format(rgi_version, len(rgidf)))

# We have to check which of them actually have enough mb data.
# Let OGGM do it:
gdirs = workflow.init_glacier_regions(rgidf)

# We need to know which period we have data for
log.info('Process the climate data...')
if baseline == 'CRU':
    execute_entity_task(tasks.process_cru_data, gdirs, print_log=False)
elif baseline == 'HISTALP':
    cfg.PARAMS['continue_on_error'] = True  # Some glaciers are not in Alps
    execute_entity_task(tasks.process_histalp_data, gdirs, print_log=False)
    cfg.PARAMS['continue_on_error'] = False

gdirs = utils.get_ref_mb_glaciers(gdirs)

# Keep only these
rgidf = rgidf.loc[rgidf.RGIId.isin([g.rgi_id for g in gdirs])]

# Save
log.info('For RGIV{} and {} we have {} reference glaciers.'.format(rgi_version,
                                                                   baseline,
                                                                   len(rgidf)))
rgidf.to_file(os.path.join(WORKING_DIR, 'mb_ref_glaciers.shp'))
Example #34
        Columbia_itmix = os.path.join(DATA_DIR,
                                      'RGI50-01.10689_itmixrun_new/dem.tif')

    if RUN_inAWS:
        #TODO this path will change in new cluster
        Columbia_path = os.path.join(WORKING_DIR,
            'per_glacier/RGI50-01/RGI50-01.10/RGI50-01.10689/')
        filename = os.path.join(Columbia_path,'dem.tif')
        Columbia_itmix = os.path.join(DATA_DIR,
            'RGI50-01.10689_itmixrun_new/dem.tif')

    os.remove(filename)
    shutil.copy(Columbia_itmix,Columbia_path)

    #Calculate glacier masks
    execute_entity_task(tasks.glacier_masks, gdirs)

# Then the rest
task_list = [
   tasks.compute_centerlines,
   tasks.compute_downstream_lines,
   tasks.catchment_area,
   tasks.initialize_flowlines,
   tasks.catchment_width_geom,
   tasks.catchment_width_correction
]

if RUN_GIS_PREPRO:
    for task in task_list:
        execute_entity_task(task, gdirs)
Example #35
# For calibration
if do_calib:
    # gdirs = [gd for gd in gdirs if gd.glacier_type != 'Ice cap']
    # gdirs = [gd for gd in gdirs if gd.terminus_type == 'Land-terminating']

    # Basic tasks
    task_list = [
        itmix.glacier_masks_itmix,
        tasks.compute_centerlines,
        tasks.catchment_area,
        tasks.initialize_flowlines,
        tasks.catchment_width_geom,
        tasks.catchment_width_correction
    ]
    for task in task_list:
        execute_entity_task(task, gdirs)

    # "Climate related tasks"
    for gd in gdirs:
        itmix.synth_apparent_mb(gd)

    # Inversion
    execute_entity_task(tasks.prepare_for_inversion, gdirs)
    fac = 3.22268124479468
    use_cfg_params = {'glen_a':fac * cfg.A, 'fs':0.}
    for gd in gdirs:
        tasks.volume_inversion(gd, use_cfg_params=use_cfg_params)


if do_itmix:
Example #36
cfg.PARAMS['continue_on_error'] = True

# Local working directory (where OGGM will write its output)
WORKING_DIR = utils.gettempdir('OGGM_Errors')
utils.mkdir(WORKING_DIR, reset=True)
cfg.PATHS['working_dir'] = WORKING_DIR

rgi_ids = ['RGI60-11.00897', 'RGI60-11.01450', 'RGI60-11.03295']

log.workflow('Starting OGGM run')
log.workflow('Number of glaciers: {}'.format(len(rgi_ids)))

# Go - get the pre-processed glacier directories
gdirs = workflow.init_glacier_regions(rgi_ids, from_prepro_level=4)

# We can step directly to the experiment!
# Random climate representative for the recent climate (1985-2015)
# with a negative bias added to the random temperature series
workflow.execute_entity_task(tasks.run_random_climate, gdirs,
                             nyears=150, seed=0,
                             temperature_bias=-1)

# Write the compiled output
utils.compile_glacier_statistics(gdirs)
utils.compile_run_output(gdirs)

# Log
m, s = divmod(time.time() - start, 60)
h, m = divmod(m, 60)
log.workflow('OGGM is done! Time needed: %d:%02d:%02d' % (h, m, s))
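With `continue_on_error` set, failing glaciers are recorded instead of aborting the run; a hedged sketch for listing them afterwards (the error columns are an assumption about the compiled statistics table):

df = utils.compile_glacier_statistics(gdirs)
if 'error_task' in df.columns:
    print(df.loc[df['error_task'].notnull(), ['error_task', 'error_msg']])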
Example #37
    def test_mb(self):

        # This is a function to produce the MB function needed by Anna

        # Download the RGI file for the run
        # Make a new dataframe of those
        rgidf = gpd.read_file(get_demo_file('SouthGlacier.shp'))

        # Go - initialize working directories
        gdirs = workflow.init_glacier_regions(rgidf)

        # Preprocessing tasks
        task_list = [
            tasks.glacier_masks,
            tasks.compute_centerlines,
            tasks.initialize_flowlines,
            tasks.catchment_area,
            tasks.catchment_intersections,
            tasks.catchment_width_geom,
            tasks.catchment_width_correction,
            tasks.process_cru_data,
            tasks.local_t_star,
            tasks.mu_star_calibration,
        ]
        for task in task_list:
            execute_entity_task(task, gdirs)

        mbref = salem.GeoTiff(get_demo_file('mb_SouthGlacier.tif'))
        demref = salem.GeoTiff(get_demo_file('dem_SouthGlacier.tif'))

        mbref = mbref.get_vardata()
        mbref[mbref == -9999] = np.nan
        demref = demref.get_vardata()[np.isfinite(mbref)]
        mbref = mbref[np.isfinite(mbref)] * 1000

        # compute the bias to make it 0 SMB on the 2D DEM
        rho = cfg.PARAMS['ice_density']
        mbmod = ConstantMassBalance(gdirs[0], bias=0)
        mymb = mbmod.get_annual_mb(demref) * cfg.SEC_IN_YEAR * rho
        mbmod = ConstantMassBalance(gdirs[0], bias=np.average(mymb))
        mymb = mbmod.get_annual_mb(demref) * cfg.SEC_IN_YEAR * rho
        np.testing.assert_allclose(np.average(mymb), 0., atol=1e-3)

        # Same for ref
        mbref = mbref - np.average(mbref)
        np.testing.assert_allclose(np.average(mbref), 0., atol=1e-3)

        # Fit poly
        p = np.polyfit(demref, mbref, deg=2)
        poly = np.poly1d(p)
        myfit = poly(demref)
        np.testing.assert_allclose(np.average(myfit), 0., atol=1e-3)

        if do_plot:
            import matplotlib.pyplot as plt
            plt.scatter(mbref, demref, s=5,
                        label='Obs (2007-2012), shifted to Avg(SMB) = 0')
            plt.scatter(mymb, demref, s=5, label='OGGM MB at t*')
            plt.scatter(myfit, demref, s=5, label='Polyfit', c='C3')
            plt.xlabel('MB (mm w.e. yr-1)')
            plt.ylabel('Altitude (m)')
            plt.legend()
            plt.show()
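
A minimal sketch of how the quadratic fit above could be handed over as the
elevation-dependent MB function the test alludes to (the helper name is
illustrative, not part of the test):

def mb_from_elevation(heights, coeffs):
    # Annual SMB (mm w.e. yr-1) from elevation (m), using the quadratic
    # coefficients `p` fitted above. Extrapolating outside the reference
    # elevation range is untested (assumption).
    return np.poly1d(coeffs)(heights)

# e.g.: mb_from_elevation(np.array([2000., 2500., 3000.]), p)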
Example #38
def run_prepro_levels(rgi_version=None, rgi_reg=None, border=None,
                      output_folder='', working_dir='', is_test=False,
                      demo=False, test_rgidf=None, test_intersects_file=None,
                      test_topofile=None, test_crudir=None):
    """Does the actual job.

    Parameters
    ----------
    rgi_version : str
        the RGI version to use (defaults to cfg.PARAMS)
    rgi_reg : str
        the RGI region to process
    border : int
        the number of pixels at the maps border
    output_folder : str
        path to the output folder (where to put the preprocessed tar files)
    working_dir : str
        path to the OGGM working directory
    is_test : bool
        to test on a couple of glaciers only!
    demo : bool
        to run the prepro for the list of demo glaciers
    test_rgidf : shapefile
        for testing purposes only
    test_intersects_file : shapefile
        for testing purposes only
    test_topofile : str
        for testing purposes only
    test_crudir : str
        for testing purposes only
    """

    # TODO: temporarily silence Fiona deprecation warnings
    import warnings
    warnings.filterwarnings("ignore", category=DeprecationWarning)

    # Module logger
    log = logging.getLogger(__name__)

    # Time
    start = time.time()

    # Initialize OGGM and set up the run parameters
    cfg.initialize(logging_level='WORKFLOW')

    # Local paths
    utils.mkdir(working_dir)
    cfg.PATHS['working_dir'] = working_dir

    # Use multiprocessing?
    cfg.PARAMS['use_multiprocessing'] = True

    # How many grid points around the glacier?
    # Make it large if you expect your glaciers to grow large
    cfg.PARAMS['border'] = border

    # Set to True for operational runs
    cfg.PARAMS['continue_on_error'] = True

    # For statistics
    climate_periods = [1920, 1960, 2000]

    if rgi_version is None:
        rgi_version = cfg.PARAMS['rgi_version']
    rgi_dir_name = 'RGI{}'.format(rgi_version)
    border_dir_name = 'b_{:03d}'.format(border)
    base_dir = os.path.join(output_folder, rgi_dir_name, border_dir_name)

    # Add a package version file
    utils.mkdir(base_dir)
    opath = os.path.join(base_dir, 'package_versions.txt')
    with open(opath, 'w') as vfile:
        vfile.write(utils.show_versions(logger=log))

    if demo:
        rgidf = utils.get_rgi_glacier_entities(cfg.DEMO_GLACIERS.index)
    elif test_rgidf is None:
        # Get the RGI file
        rgidf = gpd.read_file(utils.get_rgi_region_file(rgi_reg,
                                                        version=rgi_version))
        # We use intersects
        rgif = utils.get_rgi_intersects_region_file(rgi_reg,
                                                    version=rgi_version)
        cfg.set_intersects_db(rgif)
    else:
        rgidf = test_rgidf
        cfg.set_intersects_db(test_intersects_file)

    if is_test:
        # Just for fun
        rgidf = rgidf.sample(4)

    # Sort for more efficient parallel computing
    rgidf = rgidf.sort_values('Area', ascending=False)

    log.workflow('Starting prepro run for RGI reg: {} '
                 'and border: {}'.format(rgi_reg, border))
    log.workflow('Number of glaciers: {}'.format(len(rgidf)))

    # Input
    if test_topofile:
        cfg.PATHS['dem_file'] = test_topofile

    # L1 - initialize working directories
    gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L1', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # L1 OK - compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L1')
    workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)

    # L2 - Tasks
    # Pre-download other files just in case
    if test_crudir is None:
        _ = utils.get_cru_file(var='tmp')
        _ = utils.get_cru_file(var='pre')
    else:
        cfg.PATHS['cru_dir'] = test_crudir

    workflow.execute_entity_task(tasks.process_cru_data, gdirs)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L2', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # L2 OK - compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L2')
    workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)

    # L3 - Tasks
    task_list = [
        tasks.glacier_masks,
        tasks.compute_centerlines,
        tasks.initialize_flowlines,
        tasks.compute_downstream_line,
        tasks.compute_downstream_bedshape,
        tasks.catchment_area,
        tasks.catchment_intersections,
        tasks.catchment_width_geom,
        tasks.catchment_width_correction,
        tasks.local_t_star,
        tasks.mu_star_calibration,
        tasks.prepare_for_inversion,
        tasks.mass_conservation_inversion,
        tasks.filter_inversion_output,
    ]
    for task in task_list:
        workflow.execute_entity_task(task, gdirs)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L3', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)
    opath = os.path.join(sum_dir, 'climate_statistics_{}.csv'.format(rgi_reg))
    utils.compile_climate_statistics(gdirs, add_climate_period=climate_periods,
                                     path=opath)

    # L3 OK - compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L3')
    workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)

    # L4 - Tasks
    workflow.execute_entity_task(tasks.init_present_time_glacier, gdirs)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L4', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # Copy mini data to new dir
    base_dir = os.path.join(base_dir, 'L4')
    mini_gdirs = workflow.execute_entity_task(tasks.copy_to_basedir, gdirs,
                                              base_dir=base_dir)

    # L4 OK - compress all in output directory
    workflow.execute_entity_task(utils.gdir_to_tar, mini_gdirs, delete=True)
    utils.base_dir_to_tar(base_dir)

    # Log
    m, s = divmod(time.time() - start, 60)
    h, m = divmod(m, 60)
    log.workflow('OGGM prepro_levels is done! Time needed: '
                 '{:02d}:{:02d}:{:02d}'.format(int(h), int(m), int(s)))
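
A hedged usage sketch for this entry point (all argument values below are
illustrative, not operational settings):

if __name__ == '__main__':
    # Test run: 4 random glaciers of RGI region 11 (Central Europe),
    # 80-pixel border, outputs under ./prepro_output (paths assumed)
    run_prepro_levels(rgi_version='61', rgi_reg='11', border=80,
                      output_folder='./prepro_output',
                      working_dir='./prepro_wdir',
                      is_test=True)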
Example #39
# For calibration
if do_calib:
    # gdirs = [gd for gd in gdirs if gd.glacier_type != 'Ice cap']
    # gdirs = [gd for gd in gdirs if gd.terminus_type == 'Land-terminating']

    # Basic tasks
    task_list = [
        itmix.glacier_masks_itmix,
        tasks.compute_centerlines,
        tasks.catchment_area,
        tasks.initialize_flowlines,
        tasks.catchment_width_geom,
        tasks.catchment_width_correction
    ]
    for task in task_list:
        execute_entity_task(task, gdirs)

    # Climate related tasks
    execute_entity_task(tasks.process_cru_data, gdirs)
    tasks.compute_ref_t_stars(gdirs)
    tasks.distribute_t_stars(gdirs)

    # Inversion
    execute_entity_task(tasks.prepare_for_inversion, gdirs)
    itmix.optimize_thick(gdirs)
    execute_entity_task(tasks.volume_inversion, gdirs)

    # Write out glacier statistics
    df = utils.glacier_characteristics(gdirs)
    fpath = os.path.join(cfg.PATHS['working_dir'], 'glacier_char.csv')
    df.to_csv(fpath)
Example #40
def init_hef(reset=False, border=40):

    # test directory
    testdir = os.path.join(get_test_dir(), 'tmp_border{}'.format(border))
    if not os.path.exists(testdir):
        os.makedirs(testdir)
        reset = True

    # Init
    cfg.initialize()
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['baseline_climate'] = ''
    cfg.PATHS['working_dir'] = testdir
    cfg.PARAMS['border'] = border

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, reset=reset)
    if not gdir.has_file('inversion_params'):
        reset = True
        gdir = oggm.GlacierDirectory(entity, reset=reset)

    if not reset:
        return gdir

    gis.define_glacier_region(gdir, entity=entity)
    execute_entity_task(gis.glacier_masks, [gdir])
    execute_entity_task(centerlines.compute_centerlines, [gdir])
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    climate.process_custom_climate_data(gdir)
    mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']
    res = climate.t_star_from_refmb(gdir, mbdf=mbdf)
    climate.local_t_star(gdir, tstar=res['t_star'], bias=res['bias'])
    climate.mu_star_calibration(gdir)

    inversion.prepare_for_inversion(gdir, add_debug_var=True)

    ref_v = 0.573 * 1e9

    glen_n = cfg.PARAMS['glen_n']

    def to_optimize(x):
        # For backwards compat
        _fd = 1.9e-24 * x[0]
        glen_a = (glen_n+2) * _fd / 2.
        fs = 5.7e-20 * x[1]
        v, _ = inversion.mass_conservation_inversion(gdir, fs=fs,
                                                     glen_a=glen_a)
        return (v - ref_v)**2

    out = optimization.minimize(to_optimize, [1, 1],
                                bounds=((0.01, 10), (0.01, 10)),
                                tol=1e-4)['x']
    _fd = 1.9e-24 * out[0]
    glen_a = (glen_n+2) * _fd / 2.
    fs = 5.7e-20 * out[1]
    v, _ = inversion.mass_conservation_inversion(gdir, fs=fs,
                                                 glen_a=glen_a,
                                                 write=True)

    d = dict(fs=fs, glen_a=glen_a)
    d['factor_glen_a'] = out[0]
    d['factor_fs'] = out[1]
    gdir.write_pickle(d, 'inversion_params')

    # filter
    inversion.filter_inversion_output(gdir)

    inversion.distribute_thickness_interp(gdir, varname_suffix='_interp')
    inversion.distribute_thickness_per_altitude(gdir, varname_suffix='_alt')

    flowline.init_present_time_glacier(gdir)

    return gdir
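
A usage sketch for this fixture (the pickle name follows the function body
above):

# Build (or reuse) the Hintereisferner test directory and check the
# inversion parameters calibrated and stored by init_hef()
gdir = init_hef(border=40)
params = gdir.read_pickle('inversion_params')
print(params['glen_a'], params['fs'], params['factor_glen_a'])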
Example #41
# cfg.PATHS['working_dir'] = utils.gettempdir(dirname='OGGM-GettingStarted', reset=True)

# glacier directory
gdirs = workflow.init_glacier_directories(
    entity
)  # optionally pass prepro_base_url=base_url to start from preprocessed files
# rgi_id = gdir.rgi_id
# gdir.rgi_date = outline_year

# update gdir outline year
for gdir in gdirs:
    gdir.rgi_date = outline_year

# glacier region
# tasks.define_glacier_region(gdir, source='USER')  # user DEM; works for a single glacier
workflow.execute_entity_task(tasks.define_glacier_region, gdirs,
                             source='USER')  # when using multiple glaciers

# plot glacier
# graphics.plot_domain(gdirs[0])

# print gdir files
# print(os.listdir(gdirs[0].dir))

# create glacier mask
# workflow.execute_entity_task(tasks.glacier_masks, gdirs)

# tasks to be executed
list_tasks = [
    tasks.glacier_masks, tasks.compute_centerlines, tasks.initialize_flowlines,
    tasks.catchment_area, tasks.catchment_width_geom,
    tasks.catchment_width_correction, tasks.compute_downstream_line,
Example #42
    def test_inversion(self):

        # Download the RGI file for the run
        # Make a new dataframe of those
        rgidf = gpd.read_file(get_demo_file('SouthGlacier.shp'))

        # Go - initialize working directories
        gdirs = workflow.init_glacier_regions(rgidf)

        # Preprocessing tasks
        task_list = [
            tasks.glacier_masks,
            tasks.compute_centerlines,
            tasks.initialize_flowlines,
            tasks.catchment_area,
            tasks.catchment_intersections,
            tasks.catchment_width_geom,
            tasks.catchment_width_correction,
            tasks.process_cru_data,
            tasks.local_t_star,
            tasks.mu_star_calibration,
        ]
        for task in task_list:
            execute_entity_task(task, gdirs)

        # Inversion tasks
        execute_entity_task(tasks.prepare_for_inversion, gdirs)
        # We use the default parameters for this run
        execute_entity_task(tasks.mass_conservation_inversion, gdirs)
        execute_entity_task(tasks.distribute_thickness_per_altitude, gdirs,
                            varname_suffix='_alt')
        execute_entity_task(tasks.distribute_thickness_interp, gdirs,
                            varname_suffix='_int')

        # Reference data
        gdir = gdirs[0]
        df = self.get_ref_data(gdir)

        with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:

            v = ds.distributed_thickness_alt
            df['oggm_alt'] = v.isel(x=('z', df['i']), y=('z', df['j']))
            v = ds.distributed_thickness_int
            df['oggm_int'] = v.isel(x=('z', df['i']), y=('z', df['j']))

            ds['ref'] = xr.zeros_like(ds.distributed_thickness_int) * np.nan
            ds['ref'].data[df['j'], df['i']] = df['thick']

        rmsd_int = ((df.oggm_int - df.thick) ** 2).mean() ** .5
        rmsd_alt = ((df.oggm_alt - df.thick) ** 2).mean() ** .5
        assert rmsd_int < 80
        assert rmsd_alt < 80

        dfm = df.mean()
        np.testing.assert_allclose(dfm.thick, dfm.oggm_int, atol=50)
        np.testing.assert_allclose(dfm.thick, dfm.oggm_alt, atol=50)

        if do_plot:
            import matplotlib.pyplot as plt
            df.plot(kind='scatter', x='oggm_int', y='thick')
            plt.axis('equal')
            df.plot(kind='scatter', x='oggm_alt', y='thick')
            plt.axis('equal')
            f, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(12, 3))
            ds.ref.plot(ax=ax1)
            ds.distributed_thickness_int.plot(ax=ax2)
            ds.distributed_thickness_alt.plot(ax=ax3)
            plt.tight_layout()
            plt.show()
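
The isel calls above rely on xarray's pointwise (vectorized) indexing:
passing a ('z', indices) tuple for both x and y makes the two indexers share
the new z dimension, so one value is extracted per (i, j) pair instead of a
2D cross-product. A self-contained sketch:

import numpy as np
import xarray as xr

da = xr.DataArray(np.arange(12.).reshape(3, 4), dims=('y', 'x'))
ii = np.array([0, 2, 3])  # x (column) indices
jj = np.array([0, 1, 2])  # y (row) indices
# Picks da[0, 0], da[1, 2], da[2, 3] along a new 'z' dimension, rather
# than the 3x3 block that plain positional indexing would return
pts = da.isel(x=('z', ii), y=('z', jj))
print(pts.values)  # [ 0.  6. 11.]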
Example #43
def init_hef(reset=False, border=40, logging_level='INFO'):

    from oggm.core import gis, inversion, climate, centerlines, flowline
    import geopandas as gpd

    # test directory
    testdir = os.path.join(get_test_dir(), 'tmp_border{}'.format(border))
    if not os.path.exists(testdir):
        os.makedirs(testdir)
        reset = True

    # Init
    cfg.initialize(logging_level=logging_level)
    cfg.set_intersects_db(get_demo_file('rgi_intersect_oetztal.shp'))
    cfg.PATHS['dem_file'] = get_demo_file('hef_srtm.tif')
    cfg.PATHS['climate_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.PARAMS['baseline_climate'] = ''
    cfg.PATHS['working_dir'] = testdir
    cfg.PARAMS['border'] = border

    hef_file = get_demo_file('Hintereisferner_RGI5.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, reset=reset)
    if not gdir.has_file('inversion_params'):
        reset = True
        gdir = oggm.GlacierDirectory(entity, reset=reset)

    if not reset:
        return gdir

    gis.define_glacier_region(gdir)
    execute_entity_task(gis.glacier_masks, [gdir])
    execute_entity_task(centerlines.compute_centerlines, [gdir])
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    climate.process_custom_climate_data(gdir)
    mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']
    res = climate.t_star_from_refmb(gdir, mbdf=mbdf)
    climate.local_t_star(gdir, tstar=res['t_star'], bias=res['bias'])
    climate.mu_star_calibration(gdir)

    inversion.prepare_for_inversion(gdir, add_debug_var=True)

    ref_v = 0.573 * 1e9

    glen_n = cfg.PARAMS['glen_n']

    def to_optimize(x):
        # For backwards compat
        _fd = 1.9e-24 * x[0]
        glen_a = (glen_n + 2) * _fd / 2.
        fs = 5.7e-20 * x[1]
        v, _ = inversion.mass_conservation_inversion(gdir,
                                                     fs=fs,
                                                     glen_a=glen_a)
        return (v - ref_v)**2

    out = optimization.minimize(to_optimize, [1, 1],
                                bounds=((0.01, 10), (0.01, 10)),
                                tol=1e-4)['x']
    _fd = 1.9e-24 * out[0]
    glen_a = (glen_n + 2) * _fd / 2.
    fs = 5.7e-20 * out[1]
    v, _ = inversion.mass_conservation_inversion(gdir,
                                                 fs=fs,
                                                 glen_a=glen_a,
                                                 write=True)

    d = dict(fs=fs, glen_a=glen_a)
    d['factor_glen_a'] = out[0]
    d['factor_fs'] = out[1]
    gdir.write_pickle(d, 'inversion_params')

    # filter
    inversion.filter_inversion_output(gdir)

    inversion.distribute_thickness_interp(gdir, varname_suffix='_interp')
    inversion.distribute_thickness_per_altitude(gdir, varname_suffix='_alt')

    flowline.init_present_time_glacier(gdir)

    return gdir
gdirs = workflow.init_glacier_regions(rgidf)

# Prepro tasks
task_list = [
    tasks.glacier_masks,
    tasks.compute_centerlines,
    tasks.compute_downstream_lines,
    tasks.initialize_flowlines,
    tasks.compute_downstream_bedshape,
    tasks.catchment_area,
    tasks.catchment_intersections,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction,
]
for task in task_list:
    execute_entity_task(task, gdirs)

# Climate tasks
execute_entity_task(tasks.process_cru_data, gdirs)
# tasks.quick_crossval_t_stars(gdirs)
# tasks.compute_ref_t_stars(gdirs)
# tasks.distribute_t_stars(gdirs)

# Model validation
# ----------------

# Tests: for all glaciers, the mass balance around tstar and the
# bias relative to observations should be approximately 0
from oggm.core.models.massbalance import (ConstantMassBalanceModel,
                                          PastMassBalanceModel)
for gd in gdirs:
Example #45
            'solid_prcp_mean_nopf_weighted_{}_{}'.format(dataset, typ),
            'max_allowed_specificMB'
        ]].astype(float)
        # pd_geodetic_comp_alps = pd_geodetic_comp.loc[pd_geodetic_comp.reg==11.0]
        if reset_gdir:
            gdirs = workflow.init_glacier_directories(
                pd_geodetic_comp_alps.dropna().index[start_ind:end_ind].values,
                from_prepro_level=2,
                prepro_border=10,
                prepro_base_url=base_url,
                prepro_rgi_version='62')

            if mb_type != 'mb_real_daily':
                cfg.PARAMS['baseline_climate'] = 'ERA5dr'
                workflow.execute_entity_task(tasks.process_ecmwf_data,
                                             gdirs,
                                             dataset='ERA5dr')
            else:
                cfg.PARAMS['baseline_climate'] = 'ERA5_daily'
                for gd in pd_geodetic_comp_alps.index[start_ind:end_ind]:
                    process_era5_daily_data(gd)

        elif compute_missing:
            # raise NotImplementedError('this has to be adapted')
            path_samples = '/home/users/lschuster/bayesian_calibration/WFDE5_ISIMIP/burned_trace_plus200samples/'
            miss_samples = {}
            exist_samples = {}
            typ = '{}_{}'.format(mb_type, grad_type)
            miss_samples[typ] = []
            exist_samples[typ] = []
Example #46
# gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
gdirs = workflow.init_glacier_regions(rgidf)

# Prepro tasks
task_list = [
    tasks.glacier_masks,
    tasks.compute_centerlines,
    tasks.compute_downstream_lines,
    tasks.catchment_area,
    tasks.initialize_flowlines,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction
]
if RUN_GIS_PREPRO:
    for task in task_list:
        execute_entity_task(task, gdirs)

if RUN_CLIMATE_PREPRO:
    # Climate related tasks
    # see if we can distribute
    workflow.execute_entity_task(tasks.process_cru_data, gdirs)
    tasks.compute_ref_t_stars(gdirs)
    tasks.distribute_t_stars(gdirs)

if RUN_INVERSION:
    # Inversion
    execute_entity_task(tasks.prepare_for_inversion, gdirs)
    tasks.optimize_inversion_params(gdirs)
    execute_entity_task(tasks.volume_inversion, gdirs)

if RUN_DYNAMICS:
Example #47
rgidf = rgidf.loc[~rgidf.RGIId.isin(['RGI50-10.00012', 'RGI50-17.00850',
                                     'RGI50-19.01497', 'RGI50-19.00990',
                                     'RGI50-19.01440'])]

log.info('Number of glaciers: {}'.format(len(rgidf)))

# Go - initialize working directories
# gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
gdirs = workflow.init_glacier_regions(rgidf)

# Prepro tasks
task_list = [
    tasks.glacier_masks,
]
for task in task_list:
    execute_entity_task(task, gdirs)

# Plots (if you want)
if PLOTS_DIR == '':
    exit()

utils.mkdir(PLOTS_DIR)
for gd in gdirs:

    bname = os.path.join(PLOTS_DIR, gd.rgi_id + '_')
    demsource = ' (' + gd.read_pickle('dem_source') + ')'

    # graphics.plot_googlemap(gd)
    # plt.savefig(bname + 'ggl.png')
    # plt.close()
    graphics.plot_domain(gd, title_comment=demsource)
Example #48
def run_benchmark(rgi_version=None, rgi_reg=None, border=None,
                  output_folder='', working_dir='', is_test=False,
                  test_rgidf=None, test_intersects_file=None,
                  test_topofile=None, test_crudir=None):
    """Does the actual job.

    Parameters
    ----------
    rgi_version : str
        the RGI version to use (defaults to cfg.PARAMS)
    rgi_reg : str
        the RGI region to process
    border : int
        the number of pixels at the maps border
    output_folder : str
        path to the output folder (where to put the preprocessed tar files)
    working_dir : str
        path to the OGGM working directory
    is_test : bool
        to test on a couple of glaciers only!
    test_rgidf : shapefile
        for testing purposes only
    test_intersects_file : shapefile
        for testing purposes only
    test_topofile : str
        for testing purposes only
    test_crudir : str
        for testing purposes only
    """

    # TODO: temporarily silence Fiona deprecation warnings
    import warnings
    warnings.filterwarnings("ignore", category=DeprecationWarning)

    # Module logger
    log = logging.getLogger(__name__)

    # Initialize OGGM and set up the run parameters
    cfg.initialize(logging_level='WORKFLOW')

    # Local paths
    utils.mkdir(working_dir)
    cfg.PATHS['working_dir'] = working_dir

    # Use multiprocessing?
    cfg.PARAMS['use_multiprocessing'] = True

    # How many grid points around the glacier?
    # Make it large if you expect your glaciers to grow large
    cfg.PARAMS['border'] = border

    # Set to True for operational runs
    cfg.PARAMS['continue_on_error'] = True

    # For statistics
    odf = pd.DataFrame()

    if rgi_version is None:
        rgi_version = cfg.PARAMS['rgi_version']
    base_dir = os.path.join(output_folder)

    # Add a package version file
    utils.mkdir(base_dir)
    opath = os.path.join(base_dir, 'package_versions.txt')
    with open(opath, 'w') as vfile:
        vfile.write(utils.show_versions(logger=log))

    # Read RGI
    start = time.time()
    if test_rgidf is None:
        # Get the RGI file
        rgidf = gpd.read_file(utils.get_rgi_region_file(rgi_reg,
                                                        version=rgi_version))
        # We use intersects
        rgif = utils.get_rgi_intersects_region_file(rgi_reg,
                                                    version=rgi_version)
        cfg.set_intersects_db(rgif)
    else:
        rgidf = test_rgidf
        cfg.set_intersects_db(test_intersects_file)

    if is_test:
        # Just for fun
        rgidf = rgidf.sample(2)
    _add_time_to_df(odf, 'Read RGI', time.time()-start)

    # Sort for more efficient parallel computing
    rgidf = rgidf.sort_values('Area', ascending=False)

    log.workflow('Starting prepro run for RGI reg: {} '
                 'and border: {}'.format(rgi_reg, border))
    log.workflow('Number of glaciers: {}'.format(len(rgidf)))

    # Input
    if test_topofile:
        cfg.PATHS['dem_file'] = test_topofile

    # Initialize working directories
    start = time.time()
    gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
    _add_time_to_df(odf, 'init_glacier_regions', time.time()-start)

    # Pre-download other files just in case
    if test_crudir is None:
        _ = utils.get_cru_file(var='tmp')
        _ = utils.get_cru_file(var='pre')
    else:
        cfg.PATHS['cru_dir'] = test_crudir

    # Tasks
    task_list = [
        tasks.process_cru_data,
        tasks.glacier_masks,
        tasks.compute_centerlines,
        tasks.initialize_flowlines,
        tasks.compute_downstream_line,
        tasks.compute_downstream_bedshape,
        tasks.catchment_area,
        tasks.catchment_intersections,
        tasks.catchment_width_geom,
        tasks.catchment_width_correction,
        tasks.local_t_star,
        tasks.mu_star_calibration,
        tasks.prepare_for_inversion,
        tasks.mass_conservation_inversion,
        tasks.filter_inversion_output,
        tasks.init_present_time_glacier,
    ]
    for task in task_list:
        start = time.time()
        workflow.execute_entity_task(task, gdirs)
        _add_time_to_df(odf, task.__name__, time.time()-start)

    # Runs
    start = time.time()
    workflow.execute_entity_task(tasks.run_random_climate, gdirs,
                                 nyears=250, bias=0, seed=0,
                                 output_filesuffix='_tstar')
    _add_time_to_df(odf, 'run_random_climate_tstar_250', time.time()-start)

    start = time.time()
    workflow.execute_entity_task(tasks.run_random_climate, gdirs,
                                 nyears=250, y0=1995, seed=0,
                                 output_filesuffix='_commit')
    _add_time_to_df(odf, 'run_random_climate_commit_250', time.time()-start)

    # Compile results
    start = time.time()
    utils.compile_glacier_statistics(gdirs)
    _add_time_to_df(odf, 'compile_glacier_statistics', time.time()-start)

    start = time.time()
    utils.compile_climate_statistics(gdirs,
                                     add_climate_period=[1920, 1960, 2000])
    _add_time_to_df(odf, 'compile_climate_statistics', time.time()-start)

    start = time.time()
    utils.compile_run_output(gdirs, filesuffix='_tstar')
    _add_time_to_df(odf, 'compile_run_output_tstar', time.time()-start)

    start = time.time()
    utils.compile_run_output(gdirs, filesuffix='_commit')
    _add_time_to_df(odf, 'compile_run_output_commit', time.time()-start)

    # Log
    opath = os.path.join(base_dir, 'benchmarks_b{:03d}.csv'.format(border))
    odf.index.name = 'Task'
    odf.to_csv(opath)
    log.workflow('OGGM benchmarks is done!')
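
run_benchmark relies on an _add_time_to_df helper that is not shown in this
excerpt. A minimal sketch consistent with how it is called above (only the
call signature is visible, so the column name is an assumption):

def _add_time_to_df(df, index, t):
    # Record the wall-clock time (in seconds) of one workflow step in
    # the timings DataFrame written to benchmarks_b*.csv above
    df.loc[index, 'time_s'] = t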
Example #49
        # TODO: remember to change these paths on the cluster to run Columbia
        Columbia_itmix = os.path.join(DATA_INPUT,
                                      'RGI50-01.10689_itmixrun_new/')
        dem_cp = os.path.join(Columbia_itmix, 'dem.tif')
        dem_source_cp = os.path.join(Columbia_itmix, 'dem_source.pkl')
        grid_json_cp = os.path.join(Columbia_itmix, 'glacier_grid.json')

        # This is commented because we only need to replace the DEM once
        # os.remove(filename)
        # os.remove(dem_source)
        # os.remove(grid_json)
        # shutil.copy(dem_cp, filename)
        # shutil.copy(dem_source_cp,dem_source)
        # shutil.copy(grid_json_cp,grid_json)

    execute_entity_task(tasks.glacier_masks, gdirs)

# Pre-processing tasks
task_list = [
    tasks.compute_centerlines,
    tasks.initialize_flowlines,
    tasks.catchment_area,
    tasks.catchment_intersections,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction,
]

if RUN_GIS_PREPRO:
    for task in task_list:
        execute_entity_task(task, gdirs)
Example #50
# How many grid points around the glacier?
# Make it large if you expect your glaciers to grow large
cfg.PARAMS['border'] = 80

# Go - initialize glacier directories
gdirs = workflow.init_glacier_regions(['RGI60-11.00897'], from_prepro_level=4)

# Additional climate file (CESM)
cfg.PATHS['cesm_temp_file'] = get_demo_file('cesm.TREFHT.160001-200512'
                                            '.selection.nc')
cfg.PATHS['cesm_precc_file'] = get_demo_file('cesm.PRECC.160001-200512'
                                             '.selection.nc')
cfg.PATHS['cesm_precl_file'] = get_demo_file('cesm.PRECL.160001-200512'
                                             '.selection.nc')
execute_entity_task(tasks.process_cesm_data, gdirs)

# Run the last 200 years with the default starting point (current glacier)
# and CESM data as input
execute_entity_task(tasks.run_from_climate_data, gdirs,
                    climate_filename='gcm_data',
                    ys=1801, ye=2000,
                    output_filesuffix='_no_spinup')

# Run the spinup simulation - t* climate with a cold temperature bias
execute_entity_task(tasks.run_constant_climate, gdirs,
                    nyears=100, bias=0, temperature_bias=-0.5,
                    output_filesuffix='_spinup')
# Run a past climate run based on this spinup
execute_entity_task(tasks.run_from_climate_data, gdirs,
                    climate_filename='gcm_data',
Example #51
    thick = cl['thick'][-1]

    w_depth = thick - t_altitude

    # print('t_altitude_fromfun', t_altitude)
    # print('depth_fromfun', w_depth)
    # print('thick_fromfun', thick)
    # print('width', width)
    out = k * thick * w_depth * width / 1e9
    if out < 0:
        out = 0
    return out, w_depth, thick, width
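
The returned flux follows a k * H * d * w calving law; the division by 1e9
converts m3 yr-1 to km3 yr-1 (assuming k has units of yr-1). A quick numeric
check with purely illustrative values:

k = 2.4          # calving rate parameter (yr-1), value assumed
thick = 300.     # frontal ice thickness (m)
w_depth = 200.   # water depth at the calving front (m)
width = 500.     # terminus width (m)
out = k * thick * w_depth * width / 1e9   # = 0.072 km3 yr-1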

if RUN_GIS_mask:
    execute_entity_task(tasks.glacier_masks, gdirs)

# We copy the Columbia glacier dir with the ITMIX DEM
shutil.rmtree(os.path.join(WORKING_DIR,
                           'per_glacier/RGI60-01/RGI60-01.10/RGI60-01.10689'))
shutil.copytree(Columbia_dir,
                os.path.join(WORKING_DIR,
                             'per_glacier/RGI60-01/RGI60-01.10/RGI60-01.10689'))

# Pre-processing tasks
task_list = [
    tasks.compute_centerlines,
    tasks.initialize_flowlines,
    tasks.catchment_area,
    tasks.catchment_intersections,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction,
Example #52
# gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
gdirs = workflow.init_glacier_regions(rgidf)

# Prepro tasks
task_list = [
    tasks.glacier_masks,
    tasks.compute_centerlines,
    tasks.compute_downstream_lines,
    tasks.catchment_area,
    tasks.initialize_flowlines,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction
]
if RUN_GIS_PREPRO:
    for task in task_list:
        execute_entity_task(task, gdirs)

if RUN_CLIMATE_PREPRO:
    # Climate related tasks
    # see if we can distribute
    workflow.execute_entity_task(tasks.distribute_cru_style, gdirs)
    tasks.compute_ref_t_stars(gdirs)
    tasks.distribute_t_stars(gdirs)

if RUN_INVERSION:
    # Inversion
    execute_entity_task(tasks.prepare_for_inversion, gdirs)
    tasks.optimize_inversion_params(gdirs)
    execute_entity_task(tasks.volume_inversion, gdirs)
    workflow.execute_entity_task(tasks.distribute_thickness, gdirs, how='per_altitude')
exit()

# Prepro tasks
task_list = [
    # tasks.glacier_masks,
    # tasks.compute_centerlines,
    # tasks.compute_downstream_lines,
    # tasks.initialize_flowlines,
    # tasks.compute_downstream_bedshape,
    # tasks.catchment_area,
    # tasks.catchment_intersections,
    # tasks.catchment_width_geom,
    # tasks.catchment_width_correction,
]
for task in task_list:
    execute_entity_task(task, gdirs)

# Climate tasks -- only data preparation and tstar interpolation!
# execute_entity_task(tasks.process_cru_data, gdirs)
# tasks.distribute_t_stars(gdirs)
#
# execute_entity_task(tasks.prepare_for_inversion, gdirs)
# execute_entity_task(tasks.volume_inversion, gdirs,
#                     use_cfg_params={'glen_a': cfg.A, 'fs': 0})
# execute_entity_task(tasks.filter_inversion_output, gdirs)
#
# Tests: for all glaciers, the mass balance around tstar and the
# bias relative to observations should be approximately 0

from oggm.core.models.massbalance import ConstantMassBalanceModel
for gd in gdirs:
Example #54
    def test_optimize_inversion(self):

        # Download the RGI file for the run
        # Make a new dataframe of those
        rgidf = gpd.read_file(get_demo_file('SouthGlacier.shp'))

        # Go - initialize working directories
        gdirs = workflow.init_glacier_regions(rgidf)

        # Preprocessing tasks
        task_list = [
            tasks.glacier_masks,
            tasks.compute_centerlines,
            tasks.initialize_flowlines,
            tasks.catchment_area,
            tasks.catchment_intersections,
            tasks.catchment_width_geom,
            tasks.catchment_width_correction,
            tasks.process_cru_data,
            tasks.local_t_star,
            tasks.mu_star_calibration,
        ]
        for task in task_list:
            execute_entity_task(task, gdirs)

        # Reference data
        gdir = gdirs[0]
        df = self.get_ref_data(gdir)

        # Inversion tasks
        execute_entity_task(tasks.prepare_for_inversion, gdirs)

        glen_a = cfg.PARAMS['inversion_glen_a']
        fs = cfg.PARAMS['inversion_fs']

        def to_optimize(x):
            tasks.mass_conservation_inversion(gdir,
                                              glen_a=glen_a * x[0],
                                              fs=fs * x[1])
            tasks.distribute_thickness_per_altitude(gdir)
            with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
                thick = ds.distributed_thickness.isel(x=('z', df['i']),
                                                      y=('z', df['j']))
                out = (np.abs(thick - df.thick)).mean()
            return out

        opti = optimization.minimize(to_optimize, [1., 1.],
                                     bounds=((0.01, 10), (0.01, 10)),
                                     tol=0.1)
        # Check results and save.
        execute_entity_task(tasks.mass_conservation_inversion, gdirs,
                            glen_a=glen_a*opti['x'][0],
                            fs=0)
        execute_entity_task(tasks.distribute_thickness_per_altitude, gdirs)

        with xr.open_dataset(gdir.get_filepath('gridded_data')) as ds:
            df['oggm'] = ds.distributed_thickness.isel(x=('z', df['i']),
                                                       y=('z', df['j']))
            ds['ref'] = xr.zeros_like(ds.distributed_thickness) * np.nan
            ds['ref'].data[df['j'], df['i']] = df['thick']

        rmsd = ((df.oggm - df.thick) ** 2).mean() ** .5
        assert rmsd < 60

        dfm = df.mean()
        np.testing.assert_allclose(dfm.thick, dfm.oggm, atol=10)
        if do_plot:
            import matplotlib.pyplot as plt
            df.plot(kind='scatter', x='oggm', y='thick')
            plt.axis('equal')
            f, (ax1, ax2) = plt.subplots(1, 2, figsize=(8, 3))
            ds.ref.plot(ax=ax1)
            ds.distributed_thickness.plot(ax=ax2)
            plt.tight_layout()
            plt.show()
Example #55
def single_flowline_glacier_directory(rgi_id, reset=False, prepro_border=80):
    """Prepare a GlacierDirectory for PyGEM (single flowline to start with)

    Parameters
    ----------
    rgi_id : str
        the rgi id of the glacier
    reset : bool
        set to true to delete any pre-existing files. If false (the default),
        the directory won't be re-downloaded if already available locally in
        order to spare time.
    prepro_border : int
        the size of the glacier map: 10, 80, 160, 250

    Returns
    -------
    a GlacierDirectory object
    """

    if not isinstance(rgi_id, str):
        raise ValueError('We expect rgi_id to be a string')
    if 'RGI60-' not in rgi_id:
        raise ValueError('OGGM currently expects IDs to start with RGI60-')

    cfg.initialize()
    wd = utils.gettempdir(dirname='pygem-{}-b{}'.format(rgi_id, prepro_border),
                          reset=reset)
    cfg.PATHS['working_dir'] = wd
    cfg.PARAMS['use_multiple_flowlines'] = False

    # Check if folder is already processed
    try:
        gdir = utils.GlacierDirectory(rgi_id)
        gdir.read_pickle('model_flowlines')
        # If the above works the directory is already processed, return
        return gdir
    except OSError:
        pass

    # If not ready, we download the preprocessed data for this glacier
    gdirs = workflow.init_glacier_regions([rgi_id],
                                          from_prepro_level=2,
                                          prepro_border=prepro_border)
    # Compute all the stuff
    list_tasks = [
        tasks.glacier_masks,
        tasks.compute_centerlines,
        tasks.initialize_flowlines,
        tasks.compute_downstream_line,
        tasks.catchment_area,
        tasks.catchment_width_geom,
        tasks.catchment_width_correction,
        tasks.compute_downstream_bedshape,
        tasks.local_t_star,
        tasks.mu_star_calibration,
        tasks.prepare_for_inversion,
        tasks.mass_conservation_inversion,
        tasks.filter_inversion_output,
        tasks.init_present_time_glacier,
    ]
    for task in list_tasks:
        # The order matters!
        workflow.execute_entity_task(task, gdirs)

    return gdirs[0]
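
A usage sketch following the docstring above:

# Fetch (or reuse) the preprocessed directory for Hintereisferner and
# read the single model flowline prepared by the task list above
gdir = single_flowline_glacier_directory('RGI60-11.00897', prepro_border=80)
fls = gdir.read_pickle('model_flowlines')
print(gdir.rgi_id, 'number of flowlines:', len(fls))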
Example #56
# gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
gdirs = workflow.init_glacier_regions(rgidf)

# Prepro tasks
task_list = [
    tasks.glacier_masks,
    tasks.compute_centerlines,
    tasks.compute_downstream_lines,
    tasks.catchment_area,
    tasks.initialize_flowlines,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction
]
if RUN_GIS_PREPRO:
    for task in task_list:
        execute_entity_task(task, gdirs)

if RUN_CLIMATE_PREPRO:
    # Climate related tasks
    # see if we can distribute
    workflow.execute_entity_task(tasks.distribute_cru_style, gdirs)
    tasks.compute_ref_t_stars(gdirs)
    tasks.distribute_t_stars(gdirs)

if RUN_INVERSION:
    # Inversion
    execute_entity_task(tasks.prepare_for_inversion, gdirs)
    tasks.optimize_inversion_params(gdirs)
    execute_entity_task(tasks.volume_inversion, gdirs)

        w_depth = thick - t_altitude  # if-branch; its condition was truncated in this excerpt
    else:
        w_depth = thick - t_altitude

    print('t_altitude_fromfun', t_altitude)
    print('depth_fromfun', w_depth)
    print('thick_fromfun', thick)
    # print('width', width)
    out = k * thick * w_depth * width / 1e9
    if out < 0:
        out = 0
    return out, w_depth, thick, width


if RUN_GIS_mask:
    execute_entity_task(tasks.glacier_masks, gdirs)

# We copy the Columbia glacier dir with the ITMIX DEM
shutil.rmtree(
    os.path.join(WORKING_DIR,
                 'per_glacier/RGI60-01/RGI60-01.10/RGI60-01.10689'))
shutil.copytree(
    Columbia_dir,
    os.path.join(WORKING_DIR,
                 'per_glacier/RGI60-01/RGI60-01.10/RGI60-01.10689'))

# Pre-processing tasks
task_list = [
    tasks.compute_centerlines,
    tasks.initialize_flowlines,
    tasks.catchment_area,
Example #58
# Go - initialize working directories
gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
# gdirs = workflow.init_glacier_regions(rgidf)

# Prepro tasks
task_list = [
    tasks.glacier_masks,
    tasks.compute_centerlines,
    tasks.compute_downstream_lines,
    tasks.catchment_area,
    tasks.initialize_flowlines,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction
]
for task in task_list:
    execute_entity_task(task, gdirs)

# Climate related task
execute_entity_task(tasks.process_custom_climate_data, gdirs)
tasks.compute_ref_t_stars(gdirs)
tasks.distribute_t_stars(gdirs)

# Inversion
execute_entity_task(tasks.prepare_for_inversion, gdirs)
tasks.optimize_inversion_params(gdirs)
execute_entity_task(tasks.volume_inversion, gdirs)

# Write out glacier statistics
df = utils.glacier_characteristics(gdirs)
fpath = os.path.join(cfg.PATHS['working_dir'], 'glacier_char.csv')
df.to_csv(fpath)
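
The statistics file can then be reloaded for analysis; a minimal sketch
(column names depend on the OGGM version, so treat them as an assumption):

import pandas as pd

df_char = pd.read_csv(fpath, index_col=0)
print(df_char.head())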