Example #1
    def test_init_present_time_glacier(self):

        gdirs = up_to_inversion()

        # Inversion Results
        cfg.PARAMS['invert_with_sliding'] = True
        cfg.PARAMS['optimize_thick'] = True
        workflow.inversion_tasks(gdirs)

        fpath = os.path.join(cfg.PATHS['working_dir'],
                             'inversion_optim_results.csv')
        df = pd.read_csv(fpath, index_col=0)
        r1 = rmsd(df['ref_volume_km3'], df['oggm_volume_km3'])
        r2 = rmsd(df['ref_volume_km3'], df['vas_volume_km3'])
        self.assertTrue(r1 < r2)

        cfg.PARAMS['invert_with_sliding'] = False
        cfg.PARAMS['optimize_thick'] = False
        workflow.inversion_tasks(gdirs)

        fpath = os.path.join(cfg.PATHS['working_dir'],
                             'inversion_optim_results.csv')
        df = pd.read_csv(fpath, index_col=0)
        r1 = rmsd(df['ref_volume_km3'], df['oggm_volume_km3'])
        r2 = rmsd(df['ref_volume_km3'], df['vas_volume_km3'])
        self.assertTrue(r1 < r2)

        # Init glacier
        d = gdirs[0].read_pickle('inversion_params')
        fs = d['fs']
        glen_a = d['glen_a']
        maxs = cfg.PARAMS['max_shape_param']
        for gdir in gdirs:
            flowline.init_present_time_glacier(gdir)
            mb_mod = massbalance.ConstantMassBalanceModel(gdir)
            fls = gdir.read_pickle('model_flowlines')
            model = flowline.FluxBasedModel(fls, mb_model=mb_mod, y0=0.,
                                            fs=fs, glen_a=glen_a)
            _vol = model.volume_km3
            _area = model.area_km2
            if gdir.rgi_id in df.index:
                gldf = df.loc[gdir.rgi_id]
                # TODO: broken but should work
                # assert_allclose(gldf['oggm_volume_km3'], _vol, rtol=0.03)
                # assert_allclose(gldf['ref_area_km2'], _area, rtol=0.03)
                maxo = max([fl.order for fl in model.fls])
                for fl in model.fls:
                    self.assertTrue(np.all(fl.bed_shape > 0))
                    self.assertTrue(np.all(fl.bed_shape <= maxs))
                    if len(model.fls) > 1:
                        if fl.order == (maxo-1):
                            self.assertTrue(fl.flows_to is fls[-1])

        # Test the glacier charac
        dfc = utils.glacier_characteristics(gdirs)
        self.assertTrue(np.all(dfc.terminus_type == 'Land-terminating'))
        cc = dfc[['dem_mean_elev', 'clim_temp_avgh']].corr().values[0, 1]
        self.assertTrue(cc > 0.4)
Example #2
    def test_init_present_time_glacier(self):

        gdirs = up_to_inversion()

        # Inversion Results
        cfg.PARAMS['invert_with_sliding'] = True
        cfg.PARAMS['optimize_thick'] = True
        workflow.inversion_tasks(gdirs)

        fpath = os.path.join(cfg.PATHS['working_dir'],
                             'inversion_optim_results.csv')
        df = pd.read_csv(fpath, index_col=0)
        r1 = rmsd(df['ref_volume_km3'], df['oggm_volume_km3'])
        assert r1 < 0.1

        cfg.PARAMS['invert_with_sliding'] = False
        cfg.PARAMS['optimize_thick'] = False
        workflow.inversion_tasks(gdirs)

        fpath = os.path.join(cfg.PATHS['working_dir'],
                             'inversion_optim_results.csv')
        df = pd.read_csv(fpath, index_col=0)
        r1 = rmsd(df['ref_volume_km3'], df['oggm_volume_km3'])
        assert r1 < 0.12

        # Init glacier
        d = gdirs[0].read_pickle('inversion_params')
        fs = d['fs']
        glen_a = d['glen_a']
        for gdir in gdirs:
            flowline.init_present_time_glacier(gdir)
            mb_mod = massbalance.ConstantMassBalance(gdir)
            fls = gdir.read_pickle('model_flowlines')
            model = flowline.FluxBasedModel(fls, mb_model=mb_mod, y0=0.,
                                            fs=fs, glen_a=glen_a)
            _vol = model.volume_km3
            _area = model.area_km2
            if gdir.rgi_id in df.index:
                gldf = df.loc[gdir.rgi_id]
                assert_allclose(gldf['oggm_volume_km3'], _vol, rtol=0.05)
                assert_allclose(gldf['ref_area_km2'], _area, rtol=0.05)
                maxo = max([fl.order for fl in model.fls])
                for fl in model.fls:
                    if len(model.fls) > 1:
                        if fl.order == (maxo-1):
                            self.assertTrue(fl.flows_to is fls[-1])

        # Test the glacier charac
        dfc = utils.glacier_characteristics(gdirs)
        self.assertTrue(np.all(dfc.terminus_type == 'Land-terminating'))
        cc = dfc[['flowline_mean_elev',
                  'tstar_avg_temp_mean_elev']].corr().values[0, 1]
        assert cc < -0.8
        assert np.all(dfc.t_star > 1900)
        assert np.all(dfc.tstar_aar.mean() > 0.5)
Example #3
    def test_some_characs(self):

        gdirs = up_to_inversion()

        # Test the glacier charac
        dfc = utils.glacier_characteristics(gdirs)
        self.assertTrue(np.all(dfc.terminus_type == 'Land-terminating'))
        cc = dfc[['flowline_mean_elev',
                  'tstar_avg_temp_mean_elev']].corr().values[0, 1]
        assert cc < -0.8
        assert np.all(dfc.t_star > 1900)
        assert np.all(dfc.tstar_aar.mean() > 0.5)
Example #4
    def test_glacier_characs(self):

        gdir = init_hef()

        df = utils.glacier_characteristics([gdir], path=False)
        assert len(df) == 1
        assert np.all(~df.isnull())
        df = df.iloc[0]
        np.testing.assert_allclose(df['dem_mean_elev'],
                                   df['flowline_mean_elev'], atol=5)
        np.testing.assert_allclose(df['tstar_avg_prcp'],
                                   2853, atol=5)
        np.testing.assert_allclose(df['tstar_avg_prcpsol_max_elev'],
                                   2811, atol=5)
Example #5
    def test_workflow(self):

        # This is a check that the inversion workflow works fine

        # Download the RGI file for the run
        # Make a new dataframe of those
        rgidf = gpd.read_file(get_demo_file('SouthGlacier.shp'))

        # Go - initialize working directories
        gdirs = workflow.init_glacier_regions(rgidf)

        # Preprocessing tasks
        task_list = [
            tasks.glacier_masks,
            tasks.compute_centerlines,
            tasks.initialize_flowlines,
            tasks.catchment_area,
            tasks.catchment_intersections,
            tasks.catchment_width_geom,
            tasks.catchment_width_correction,
        ]
        for task in task_list:
            execute_entity_task(task, gdirs)

        # Climate tasks -- only data IO and tstar interpolation!
        execute_entity_task(tasks.process_cru_data, gdirs)
        tasks.distribute_t_stars(gdirs)
        execute_entity_task(tasks.apparent_mb, gdirs)

        # Inversion tasks
        execute_entity_task(tasks.prepare_for_inversion, gdirs)
        # We use the default parameters for this run
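        # (glen_a=cfg.A is the default creep parameter; fs=0 means no sliding)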
        execute_entity_task(tasks.volume_inversion, gdirs, glen_a=cfg.A, fs=0)
        execute_entity_task(tasks.filter_inversion_output, gdirs)

        df = utils.glacier_characteristics(gdirs)
        assert df.inv_thickness_m[0] < 100

        if do_plot:
            import matplotlib.pyplot as plt
            from oggm.graphics import plot_inversion
            plot_inversion(gdirs)
            plt.show()
Example #6
def optimize_per_glacier(gdirs):

    gtd_df = _prepare_inv(gdirs)
    ref_gdirs = gtd_df['ref_gdirs']
    ref_volume_km3 = gtd_df['ref_volume_km3']
    ref_area_km2 = gtd_df['ref_area_km2']
    ref_thickness_m = gtd_df['ref_thickness_m']

    # Optimize without sliding
    log.info('Compute the inversion parameter.')

    fac = []
    for gdir in ref_gdirs:
        def to_optimize(x):
            glen_a = cfg.A * x[0]
            v, a = invert_parabolic_bed(gdir, glen_a=glen_a, fs=0.,
                                        write=False)
            return utils.rmsd(v / a, ref_thickness_m.loc[gdir.rgi_id])
        opti = optimization.minimize(to_optimize, [1.],
                                     bounds=((0.01, 10), ),
                                     tol=1.e-4)
        # Check results and save.
        fac.append(opti['x'][0])

    # All results
    df = utils.glacier_characteristics(ref_gdirs)

    df['ref_area_km2'] = ref_area_km2
    df['ref_volume_km3'] = ref_volume_km3
    df['ref_thickness_m'] = ref_thickness_m
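    # Volume-area scaling: V [km3] = 0.034 * A [km2]**1.375 (constants
    # commonly attributed to Bahr et al., 1997); mean thickness in m is
    # V / A * 1000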
    df['vas_volume_km3'] = 0.034*(df['ref_area_km2']**1.375)
    df['vas_thickness_m'] = df['vas_volume_km3'] / ref_area_km2 * 1000
    df['fac'] = fac
    fpath = os.path.join(cfg.PATHS['working_dir'],
                         'inversion_optim_pergla.csv')
    df.to_csv(fpath)
Example #7
def optimize_thick(gdirs):
    """Optimizes fd based on GlaThiDa thicknesses.

    We use the glacier averaged thicknesses provided by GlaThiDa and correct
    them for differences in area with RGI, using a glacier specific volume-area
    scaling formula.

    Parameters
    ----------
    gdirs: list of oggm.GlacierDirectory objects
    """

    gtd_df = _prepare_inv(gdirs)
    ref_gdirs = gtd_df['ref_gdirs']
    ref_volume_km3 = gtd_df['ref_volume_km3']
    ref_area_km2 = gtd_df['ref_area_km2']
    ref_thickness_m = gtd_df['ref_thickness_m']

    # Optimize without sliding
    log.info('Compute the inversion parameter.')

    def to_optimize(x):
        tmp_ = np.zeros(len(ref_gdirs))
        glen_a = cfg.A * x[0]
        for i, gdir in enumerate(ref_gdirs):
            v, a = mass_conservation_inversion(gdir, glen_a=glen_a,
                                               fs=0., write=False)
            tmp_[i] = v / a
        return utils.rmsd(tmp_, ref_thickness_m)
    opti = optimization.minimize(to_optimize, [1.],
                                 bounds=((0.01, 10), ),
                                 tol=1.e-4)
    # Check results and save.
    glen_a = cfg.A * opti['x'][0]
    fs = 0.

    # This is for the stats
    oggm_volume_m3 = np.zeros(len(ref_gdirs))
    rgi_area_m2 = np.zeros(len(ref_gdirs))
    for i, gdir in enumerate(ref_gdirs):
        v, a = mass_conservation_inversion(gdir, glen_a=glen_a, fs=fs,
                                           write=False)
        oggm_volume_m3[i] = v
        rgi_area_m2[i] = a
    assert np.allclose(rgi_area_m2 * 1e-6, ref_area_km2)

    # This is for each glacier
    out = dict()
    out['glen_a'] = glen_a
    out['fs'] = fs
    out['factor_glen_a'] = opti['x'][0]
    try:
        out['factor_fs'] = opti['x'][1]
    except IndexError:
        out['factor_fs'] = 0.
    for gdir in gdirs:
        gdir.write_pickle(out, 'inversion_params')

    # This is for the working dir
    # Simple stats
    out['vol_rmsd'] = utils.rmsd(oggm_volume_m3 * 1e-9, ref_volume_km3)
    out['thick_rmsd'] = utils.rmsd(oggm_volume_m3 / (ref_area_km2 * 1e6),
                                   ref_thickness_m)
    log.info(
        'Optimized glen_a and fs with a factor {factor_glen_a:.2f} and '
        '{factor_fs:.2f} for a thick RMSD of {thick_rmsd:.3f}'.format(**out))

    df = pd.DataFrame(out, index=[0])
    fpath = os.path.join(cfg.PATHS['working_dir'],
                         'inversion_optim_params.csv')
    df.to_csv(fpath)

    # All results
    df = utils.glacier_characteristics(ref_gdirs)
    df['ref_area_km2'] = ref_area_km2
    df['ref_volume_km3'] = ref_volume_km3
    df['ref_thickness_m'] = ref_thickness_m
    df['oggm_volume_km3'] = oggm_volume_m3 * 1e-9
    df['oggm_thickness_m'] = oggm_volume_m3 / (ref_area_km2 * 1e6)
    df['vas_volume_km3'] = 0.034*(df['ref_area_km2']**1.375)
    df['vas_thickness_m'] = df['vas_volume_km3'] / ref_area_km2 * 1000

    rgi_id = [gdir.rgi_id for gdir in ref_gdirs]
    df = pd.DataFrame(df, index=rgi_id)
    fpath = os.path.join(cfg.PATHS['working_dir'],
                         'inversion_optim_results.csv')
    df.to_csv(fpath)

    # return value for tests
    return out
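
The docstring of optimize_thick above mentions correcting the GlaThiDa mean
thicknesses for area differences with the RGI via volume-area scaling. A
minimal sketch of that correction, under stated assumptions (the helper name
and its inputs are hypothetical, and _prepare_inv itself is not shown in these
examples):

def vas_corrected_volume_km3(gtd_thickness_m, gtd_area_km2, rgi_area_km2):
    # Hypothetical helper: volume implied by the GlaThiDa mean thickness
    # over the GlaThiDa area
    gtd_volume_km3 = gtd_thickness_m * 1e-3 * gtd_area_km2
    # Rescale to the RGI area assuming V = c * A**1.375, so that
    # V_rgi / V_gtd = (A_rgi / A_gtd)**1.375
    return gtd_volume_km3 * (rgi_area_km2 / gtd_area_km2)**1.375
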
Example #8
# GIS preprocessing tasks
tasks.compute_downstream_bedshape(gdir)
tasks.catchment_area(gdir)
tasks.catchment_intersections(gdir)
tasks.catchment_width_geom(gdir)
tasks.catchment_width_correction(gdir)
# Climate and mass-balance calibration tasks
tasks.process_cru_data(gdir)
tasks.mu_candidates(gdir)
tasks.compute_ref_t_stars([gdir])
tasks.distribute_t_stars([gdir])
tasks.apparent_mb(gdir)
# Inversion tasks
tasks.prepare_for_inversion(gdir)
tasks.volume_inversion(gdir, glen_a=cfg.A, fs=0)
tasks.filter_inversion_output(gdir)
# Prepare the present-time glacier for the flowline model
tasks.init_present_time_glacier(gdir)

df = utils.glacier_characteristics([gdir], path=False)

reset = True
seed = 0

# Two 800-year random climate runs with the same seed and different
# baseline climate years (y0)
tasks.random_glacier_evolution(gdir,
                               nyears=800,
                               seed=0,
                               y0=2000,
                               filesuffix='_2000_def',
                               reset=reset)

tasks.random_glacier_evolution(gdir,
                               nyears=800,
                               seed=0,
                               y0=1920,
Example #9
    tasks.catchment_width_correction,
]

if RUN_GIS_PREPRO:
    for task in task_list:
        execute_entity_task(task, gdirs)

if RUN_CLIMATE_PREPRO:
    for gdir in gdirs:
        gdir.inversion_calving_rate = 0
    execute_entity_task(tasks.process_cru_data, gdirs)
    tasks.distribute_t_stars(gdirs)
    execute_entity_task(tasks.apparent_mb, gdirs)

if RUN_INVERSION:
    # Inversion tasks
    execute_entity_task(tasks.prepare_for_inversion, gdirs, add_debug_var=True)
    execute_entity_task(tasks.volume_inversion,
                        gdirs,
                        glen_a=3.339e-24,
                        fs=0.0)

# Compile output
utils.glacier_characteristics(gdirs,
                              filesuffix='_Lake_land_no_calving_fs_zero_')

# Log
m, s = divmod(time.time() - start, 60)
h, m = divmod(m, 60)
log.info("OGGM is done! Time needed: %02d:%02d:%02d" % (h, m, s))
Example #10
# Sort for more efficient parallel computing
rgidf = rgidf.sort_values('Area', ascending=False)

# rgidf = rgidf.loc[rgidf.RGIId.isin(['RGI50-01.10299'])]

print('Number of glaciers: {}'.format(len(rgidf)))


# Go - initialize working directories
# -----------------------------------

# you can use the command below to reset your run -- use with caution!
# gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
gdirs = workflow.init_glacier_regions(rgidf)

utils.glacier_characteristics(gdirs)
utils.compile_run_output(gdirs, filesuffix='_fromzero')
utils.compile_run_output(gdirs, filesuffix='_fromzero_newparams')
utils.compile_run_output(gdirs, filesuffix='_fromtoday')
utils.compile_run_output(gdirs, filesuffix='_fromtoday_newparams')

exit()

# Prepro tasks
task_list = [
    # tasks.glacier_masks,
    # tasks.compute_centerlines,
    # tasks.compute_downstream_lines,
    # tasks.initialize_flowlines,
    # tasks.compute_downstream_bedshape,
    # tasks.catchment_area,
Example #11
                all_calving_data = objt['calving_fluxes']
                all_data_depth = objt['water_depth']
                all_data_H_i = objt['H_ice']
                all_data_width = objt['t_width']
            # we see the final calculated calving flux
            last_calving = all_calving_data[-1]
            last_width = all_data_width

            print('For the glacier', gdir.rgi_id)
            print('last calving value is:', last_calving)

            gdir.inversion_calving_rate = last_calving

    # Calculate everything again with the calving flux assigned; the filter
    # and correction for negative flux are switched off here
    cfg.PARAMS['correct_for_neg_flux'] = False
    cfg.PARAMS['filter_for_neg_flux'] = False
    tasks.distribute_t_stars(gdirs)
    execute_entity_task(tasks.apparent_mb, gdirs)
    execute_entity_task(tasks.prepare_for_inversion, gdirs, add_debug_var=True)
    execute_entity_task(tasks.volume_inversion, gdirs, glen_a=cfg.A, fs=cfg.FS)

    # Write out glacier statistics
    utils.glacier_characteristics(gdirs,
                                  filesuffix='_with_calving_' + suf,
                                  inversion_only=True)

    m, s = divmod(time.time() - start, 60)
    h, m = divmod(m, 60)
    log.info("OGGM with calving is done! Time needed: %02d:%02d:%02d" %
             (h, m, s))
Example #12
    def setup_cache(self):

        setattr(full_workflow.setup_cache, "timeout", 360)

        utils.mkdir(self.testdir, reset=True)
        self.cfg_init()

        # Pre-download other files which will be needed later
        utils.get_cru_cl_file()
        utils.get_cru_file(var='tmp')
        utils.get_cru_file(var='pre')

        # Get the RGI glaciers for the run.
        rgi_list = ['RGI60-01.10299', 'RGI60-11.00897', 'RGI60-18.02342']
        rgidf = utils.get_rgi_glacier_entities(rgi_list)

        # We use intersects
        db = utils.get_rgi_intersects_region_file(version='61',
                                                  rgi_ids=rgi_list)
        cfg.set_intersects_db(db)

        # Sort for more efficient parallel computing
        rgidf = rgidf.sort_values('Area', ascending=False)

        # Go - initialize working directories
        gdirs = workflow.init_glacier_regions(rgidf)

        # Preprocessing tasks
        task_list = [
            tasks.glacier_masks,
            tasks.compute_centerlines,
            tasks.initialize_flowlines,
            tasks.compute_downstream_line,
            tasks.compute_downstream_bedshape,
            tasks.catchment_area,
            tasks.catchment_intersections,
            tasks.catchment_width_geom,
            tasks.catchment_width_correction,
        ]
        for task in task_list:
            execute_entity_task(task, gdirs)

        # Climate tasks -- only data IO and tstar interpolation!
        execute_entity_task(tasks.process_cru_data, gdirs)
        execute_entity_task(tasks.local_mustar, gdirs)
        execute_entity_task(tasks.apparent_mb, gdirs)

        # Inversion tasks
        execute_entity_task(tasks.prepare_for_inversion, gdirs)
        # We use the default parameters for this run
        execute_entity_task(tasks.mass_conservation_inversion, gdirs)
        execute_entity_task(tasks.filter_inversion_output, gdirs)

        # Final preparation for the run
        execute_entity_task(tasks.init_present_time_glacier, gdirs)

        # Random climate representative for the tstar climate, without bias
        # In an ideal world this would imply that the glaciers remain stable,
        # but it doesn't have to be so
        execute_entity_task(tasks.run_constant_climate,
                            gdirs,
                            bias=0,
                            nyears=100,
                            output_filesuffix='_tstar')

        execute_entity_task(tasks.run_constant_climate,
                            gdirs,
                            y0=1990,
                            nyears=100,
                            output_filesuffix='_pd')

        # Compile output
        utils.glacier_characteristics(gdirs)
        utils.compile_run_output(gdirs, filesuffix='_tstar')
        utils.compile_run_output(gdirs, filesuffix='_pd')
        utils.compile_climate_input(gdirs)

        return gdirs
Example #13
    tasks.initialize_flowlines,
    tasks.catchment_area,
    tasks.catchment_intersections,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction,
]

if RUN_GIS_PREPRO:
    for task in task_list:
        execute_entity_task(task, gdirs)

if RUN_CLIMATE_PREPRO:
    for gdir in gdirs:
        gdir.inversion_calving_rate = 0
    execute_entity_task(tasks.process_cru_data, gdirs)
    tasks.distribute_t_stars(gdirs)
    execute_entity_task(tasks.apparent_mb, gdirs)

if RUN_INVERSION:
    # Inversion tasks
    execute_entity_task(tasks.prepare_for_inversion, gdirs, add_debug_var=True)
    execute_entity_task(tasks.volume_inversion, gdirs, glen_a=cfg.A, fs=cfg.FS)

# Compile output
utils.glacier_characteristics(gdirs, filesuffix='_no_calving_cfgFS_')

# Log
m, s = divmod(time.time() - start, 60)
h, m = divmod(m, 60)
log.info("OGGM is done! Time needed: %02d:%02d:%02d" % (h, m, s))
Example #14
task_list = [
    tasks.compute_centerlines,
    tasks.initialize_flowlines,
    tasks.catchment_area,
    tasks.catchment_intersections,
    tasks.catchment_width_geom,
    tasks.catchment_width_correction,
]
for task in task_list:
    execute_entity_task(task, gdirs)

# Climate tasks -- we make sure that calving is = 0 for all tidewater
for gdir in gdirs:
    gdir.inversion_calving_rate = 0

execute_entity_task(tasks.process_cru_data, gdirs)
tasks.distribute_t_stars(gdirs)
execute_entity_task(tasks.apparent_mb, gdirs)

# Inversion tasks
execute_entity_task(tasks.prepare_for_inversion, gdirs, add_debug_var=True)
execute_entity_task(tasks.volume_inversion, gdirs, glen_a=cfg.A, fs=cfg.FS)

# Compile output
utils.glacier_characteristics(gdirs,
                              filesuffix='_Columbia_no_calving_with_sliding_')

# Log
m, s = divmod(time.time() - start, 60)
h, m = divmod(m, 60)
log.info("OGGM is done! Time needed: %02d:%02d:%02d" % (h, m, s))
Example #15
def optimize_thick(gdirs):
    """Optimizes fd based on GlaThiDa thicknesses.

    We use the glacier averaged thicknesses provided by GlaThiDa and correct
    them for differences in area with RGI, using a glacier specific volume-area
    scaling formula.

    Parameters
    ----------
    gdirs: list of oggm.GlacierDirectory objects
    """

    gtd_df = _prepare_inv(gdirs)
    ref_gdirs = gtd_df['ref_gdirs']
    ref_volume_km3 = gtd_df['ref_volume_km3']
    ref_area_km2 = gtd_df['ref_area_km2']
    ref_thickness_m = gtd_df['ref_thickness_m']

    # Optimize without sliding
    log.info('Compute the inversion parameter.')

    def to_optimize(x):
        tmp_ = np.zeros(len(ref_gdirs))
        glen_a = cfg.A * x[0]
        for i, gdir in enumerate(ref_gdirs):
            v, a = invert_parabolic_bed(gdir,
                                        glen_a=glen_a,
                                        fs=0.,
                                        write=False)
            tmp_[i] = v / a
        return utils.rmsd(tmp_, ref_thickness_m)

    opti = optimization.minimize(to_optimize, [1.],
                                 bounds=((0.01, 10), ),
                                 tol=1.e-4)
    # Check results and save.
    glen_a = cfg.A * opti['x'][0]
    fs = 0.

    # This is for the stats
    oggm_volume_m3 = np.zeros(len(ref_gdirs))
    rgi_area_m2 = np.zeros(len(ref_gdirs))
    for i, gdir in enumerate(ref_gdirs):
        v, a = invert_parabolic_bed(gdir, glen_a=glen_a, fs=fs, write=False)
        oggm_volume_m3[i] = v
        rgi_area_m2[i] = a
    assert np.allclose(rgi_area_m2 * 1e-6, ref_area_km2)

    # This is for each glacier
    out = dict()
    out['glen_a'] = glen_a
    out['fs'] = fs
    out['factor_glen_a'] = opti['x'][0]
    try:
        out['factor_fs'] = opti['x'][1]
    except IndexError:
        out['factor_fs'] = 0.
    for gdir in gdirs:
        gdir.write_pickle(out, 'inversion_params')

    # This is for the working dir
    # Simple stats
    out['vol_rmsd'] = utils.rmsd(oggm_volume_m3 * 1e-9, ref_volume_km3)
    out['thick_rmsd'] = utils.rmsd(oggm_volume_m3 / (ref_area_km2 * 1e6),
                                   ref_thickness_m)
    log.info(
        'Optimized glen_a and fs with a factor {factor_glen_a:.2f} and '
        '{factor_fs:.2f} for a thick RMSD of {thick_rmsd:.3f}'.format(**out))

    df = pd.DataFrame(out, index=[0])
    fpath = os.path.join(cfg.PATHS['working_dir'],
                         'inversion_optim_params.csv')
    df.to_csv(fpath)

    # All results
    df = utils.glacier_characteristics(ref_gdirs)
    df['ref_area_km2'] = ref_area_km2
    df['ref_volume_km3'] = ref_volume_km3
    df['ref_thickness_m'] = ref_thickness_m
    df['oggm_volume_km3'] = oggm_volume_m3 * 1e-9
    df['oggm_thickness_m'] = oggm_volume_m3 / (ref_area_km2 * 1e6)
    df['vas_volume_km3'] = 0.034 * (df['ref_area_km2']**1.375)
    df['vas_thickness_m'] = df['vas_volume_km3'] / ref_area_km2 * 1000

    rgi_id = [gdir.rgi_id for gdir in ref_gdirs]
    df = pd.DataFrame(df, index=rgi_id)
    fpath = os.path.join(cfg.PATHS['working_dir'],
                         'inversion_optim_results.csv')
    df.to_csv(fpath)

    # return value for tests
    return out
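
For reference, the parameters written to the 'inversion_params' pickle above
are read back by downstream code exactly as in Example #1:

d = gdir.read_pickle('inversion_params')
fs = d['fs']
glen_a = d['glen_a']
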
Example #16
    cfg.PARAMS['correct_for_neg_flux'] = False
    cfg.PARAMS['filter_for_neg_flux'] = False
    execute_entity_task(tasks.process_cru_data, gdirs)
    tasks.distribute_t_stars(gdirs)
    execute_entity_task(tasks.apparent_mb, gdirs)

if RUN_INVERSION:
    # Inversion tasks
    execute_entity_task(tasks.prepare_for_inversion, gdirs)
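    # optimize_inversion_params presumably calibrates glen_a/fs against
    # reference thickness data, in the spirit of optimize_thick above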
    tasks.optimize_inversion_params(gdirs)
    execute_entity_task(tasks.volume_inversion, gdirs)
    # execute_entity_task(tasks.filter_inversion_output, gdirs)

# Compile output if no calving
if No_calving:
    utils.glacier_characteristics(gdirs, filesuffix='_no_calving')

    # Log
    m, s = divmod(time.time() - start, 60)
    h, m = divmod(m, 60)
    log.info("OGGM no_calving is done! Time needed: %02d:%02d:%02d" % (h, m, s))

# Calving loop
# -----------------------------------
if With_calving:
    # Re-initializing climate tasks and inversion without calving to be sure
    for gdir in gdirs:
        gdir.inversion_calving_rate = 0

    cfg.PARAMS['correct_for_neg_flux'] = False
    cfg.PARAMS['filter_for_neg_flux'] = False
Example #17
# Inversion
execute_entity_task(tasks.prepare_for_inversion, gdirs)
execute_entity_task(tasks.volume_inversion, gdirs, glen_a=cfg.A, fs=0)
execute_entity_task(tasks.filter_inversion_output, gdirs)

# Run
execute_entity_task(tasks.init_present_time_glacier, gdirs)

# While the above should always work, this step is not guaranteed to run smoothly
execute_entity_task(tasks.random_glacier_evolution, gdirs)

# Write out glacier statistics
df = utils.glacier_characteristics(gdirs)
fpath = os.path.join(cfg.PATHS['working_dir'], 'glacier_char.csv')
df.to_csv(fpath)

# Plots (if you want)
if PLOTS_DIR == '':
    exit()

utils.mkdir(PLOTS_DIR)
for gd in gdirs:
    bname = os.path.join(PLOTS_DIR, gd.name + '_' + gd.rgi_id + '_')
    graphics.plot_googlemap(gd)
    plt.savefig(bname + 'ggl.png')
    plt.close()
    graphics.plot_domain(gd)
    plt.savefig(bname + 'dom.png')
Example #18
                all_calving_data = objt['calving_fluxes']
                all_data_depth = objt['water_depth']
                all_data_H_i = objt['H_ice']
                all_data_width = objt['t_width']
            # we see the final calculated calving flux
            last_calving = all_calving_data[-1]
            last_width = all_data_width

            print('For the glacier', gdir.rgi_id)
            print('last calving value is:', last_calving)

            gdir.inversion_calving_rate = last_calving

    # Calculate everything again with the calving flux assigned; the filter
    # and correction for negative flux are switched off here
    cfg.PARAMS['correct_for_neg_flux'] = False
    cfg.PARAMS['filter_for_neg_flux'] = False
    tasks.distribute_t_stars(gdirs)
    execute_entity_task(tasks.apparent_mb, gdirs)
    execute_entity_task(tasks.prepare_for_inversion, gdirs, add_debug_var=True)
    execute_entity_task(tasks.volume_inversion, gdirs, glen_a=cfg.A, fs=cfg.FS)

    # Write out glacier statistics
    utils.glacier_characteristics(gdirs,
                                  filesuffix='_with_calving_corrected_' + suf)

    m, s = divmod(time.time() - start, 60)
    h, m = divmod(m, 60)
    log.info("OGGM with calving is done! Time needed: %02d:%02d:%02d" %
             (h, m, s))
Example #19
tasks.compute_ref_t_stars([gdir])
tasks.distribute_t_stars([gdir])
tasks.apparent_mb(gdir)
tasks.prepare_for_inversion(gdir)
tasks.volume_inversion(gdir, glen_a=cfg.A, fs=0)
tasks.filter_inversion_output(gdir)
tasks.distribute_thickness(gdir, how='per_interpolation')
tasks.init_present_time_glacier(gdir)

# On single flowline run
tasks.random_glacier_evolution(gdir, nyears=800, bias=0, seed=0,
                               filesuffix='_fl_def',
                               zero_initial_glacier=True)

# For metadata
utils.glacier_characteristics([gdir], path=base_dir+'/hef_fl_out.csv')

# OK now the distributed stuff
with netCDF4.Dataset(gdir.get_filepath('gridded_data')) as nc:
    topo = nc.variables['topo'][:]
    ice_thick = nc.variables['thickness'][:]

# Take a subset of the topo for easier regridding
ds = gdir.grid.to_dataset()
ds['topo'] = (('y', 'x'), topo)
ds['ice_thick'] = (('y', 'x'), ice_thick)
ds = ds.salem.subset(corners=((1, 3), (250, 202)), crs=gdir.grid)

# Possible factors: 1 (50 m), 2 (100 m) or 5 (250 m)
factor = 2
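# 'reduce' is defined elsewhere in the original script (not shown here); it
# presumably aggregates the 50 m grid by the given factor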
topo = reduce(ds.topo.values, factor=factor)