Example #1
def test_find_t0(self):
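    # Search for an initial (1950) glacier state whose modelled length history
    # matches the Leclercq length record for Hintereisferner.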

    import copy
    import numpy as np
    import pandas as pd
    import matplotlib.pyplot as plt
    from oggm import utils
    from oggm.core import flowline
    from oggm.tests.funcs import init_hef
    do_plot = True

    gdir = init_hef(border=80, invert_with_sliding=False)

    flowline.init_present_time_glacier(gdir)
    glacier = gdir.read_pickle('model_flowlines')
    df = pd.read_csv(utils.get_demo_file('hef_lengths.csv'), index_col=0)
    df.columns = ['Leclercq']
    df = df.loc[1950:]

    vol_ref = flowline.FlowlineModel(glacier).volume_km3

    init_bias = 94.  # so that "went too far" comes once on travis
    rtol = 0.005

    flowline.iterative_initial_glacier_search(gdir,
                                              y0=df.index[0],
                                              init_bias=init_bias,
                                              rtol=rtol,
                                              write_steps=True)

    past_model = flowline.FileModel(gdir.get_filepath('model_run'))

    vol_start = past_model.volume_km3
    bef_fls = copy.deepcopy(past_model.fls)

    mylen = past_model.length_m_ts()
    df['oggm'] = mylen[12::12].values
    df = df - df.iloc[-1]

    past_model.run_until(2003)

    vol_end = past_model.volume_km3
    np.testing.assert_allclose(vol_ref, vol_end, rtol=0.05)

    rmsd = utils.rmsd(df.Leclercq, df.oggm)
    self.assertTrue(rmsd < 1000.)

    if do_plot:  # pragma: no cover
        df.plot()
        plt.ylabel('Glacier length (relative to 2003)')
        plt.show()
        fig = plt.figure()
        lab = 'ref (vol={:.2f}km3)'.format(vol_ref)
        plt.plot(glacier[-1].surface_h, 'k', label=lab)
        lab = 'oggm start (vol={:.2f}km3)'.format(vol_start)
        plt.plot(bef_fls[-1].surface_h, 'b', label=lab)
        lab = 'oggm end (vol={:.2f}km3)'.format(vol_end)
        plt.plot(past_model.fls[-1].surface_h, 'r', label=lab)

        plt.plot(glacier[-1].bed_h, 'gray', linewidth=2)
        plt.legend(loc='best')
        plt.show()
Example #2
    def test_random(self):
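        # Assumed context: the surrounding test module provides os, pickle,
        # numpy as np, xarray as xr, assert_allclose, the oggm modules
        # (utils, workflow, flowline) and the TEST_DIR / CLI_LOGF constants,
        # plus the up_to_inversion() helper defined in the test file.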

        # Fake Reset (all these tests are horribly coded)
        if not os.path.exists(TEST_DIR):
            os.makedirs(TEST_DIR)
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('none', f)
        gdirs = up_to_inversion()

        workflow.execute_entity_task(flowline.init_present_time_glacier, gdirs)
        workflow.execute_entity_task(flowline.run_random_climate,
                                     gdirs,
                                     nyears=200,
                                     seed=0,
                                     store_monthly_step=True,
                                     output_filesuffix='_test')

        for gd in gdirs:

            path = gd.get_filepath('model_run', filesuffix='_test')
            # See that we are running ok
            with flowline.FileModel(path) as model:
                vol = model.volume_km3_ts()
                area = model.area_km2_ts()
                length = model.length_m_ts()

                self.assertTrue(np.all(np.isfinite(vol) & (vol != 0.)))
                self.assertTrue(np.all(np.isfinite(area) & (area != 0.)))
                self.assertTrue(np.all(np.isfinite(length) & (length != 0.)))

            ds_diag = gd.get_filepath('model_diagnostics', filesuffix='_test')
            ds_diag = xr.open_dataset(ds_diag)
            df = vol.to_frame('RUN')
            df['DIAG'] = ds_diag.volume_m3.to_series() * 1e-9
            assert_allclose(df.RUN, df.DIAG)
            df = area.to_frame('RUN')
            df['DIAG'] = ds_diag.area_m2.to_series() * 1e-6
            assert_allclose(df.RUN, df.DIAG)
            df = length.to_frame('RUN')
            df['DIAG'] = ds_diag.length_m.to_series()
            assert_allclose(df.RUN, df.DIAG)

        # Test output
        ds = utils.compile_run_output(gdirs, filesuffix='_test')
        assert_allclose(ds_diag.volume_m3, ds.volume.sel(rgi_id=gd.rgi_id))
        assert_allclose(ds_diag.area_m2, ds.area.sel(rgi_id=gd.rgi_id))
        assert_allclose(ds_diag.length_m, ds.length.sel(rgi_id=gd.rgi_id))
        df = ds.volume.sel(rgi_id=gd.rgi_id).to_series().to_frame('OUT')
        df['RUN'] = ds_diag.volume_m3.to_series()
        assert_allclose(df.RUN, df.OUT)
Example #3
    def test_random(self):
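        # Assumed context: same module-level fixtures as in Example #2.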

        # Fake Reset (all these tests are horribly coded)
        if not os.path.exists(TEST_DIR):
            os.makedirs(TEST_DIR)
        with open(CLI_LOGF, 'wb') as f:
            pickle.dump('none', f)
        gdirs = up_to_inversion(reset=False)

        # First tests
        df = utils.compile_glacier_statistics(gdirs)
        df['volume_before_calving_km3'] = df['volume_before_calving'] * 1e-9
        assert np.sum(~df.volume_before_calving.isnull()) == 2
        dfs = df.iloc[:2]
        assert np.all(dfs['volume_before_calving_km3'] < dfs['inv_volume_km3'])
        assert_allclose(df['inv_flowline_glacier_area'] * 1e-6,
                        df['rgi_area_km2'])

        workflow.execute_entity_task(flowline.init_present_time_glacier, gdirs)
        # Check init_present_time_glacier not messing around too much
        for gd in gdirs:
            from oggm.core.massbalance import LinearMassBalance
            from oggm.core.flowline import FluxBasedModel
            mb_mod = LinearMassBalance(ela_h=2500)
            fls = gd.read_pickle('model_flowlines')
            model = FluxBasedModel(fls, mb_model=mb_mod)
            df.loc[gd.rgi_id, 'start_area_km2'] = model.area_km2
            df.loc[gd.rgi_id, 'start_volume_km3'] = model.volume_km3
            df.loc[gd.rgi_id, 'start_length'] = model.length_m
        assert_allclose(df['rgi_area_km2'], df['start_area_km2'], rtol=0.01)
        assert_allclose(df['rgi_area_km2'].sum(),
                        df['start_area_km2'].sum(),
                        rtol=0.005)
        assert_allclose(df['inv_volume_km3'], df['start_volume_km3'])
        assert_allclose(df['inv_volume_km3'].sum(),
                        df['start_volume_km3'].sum())
        assert_allclose(df['main_flowline_length'], df['start_length'])

        workflow.execute_entity_task(flowline.run_random_climate,
                                     gdirs,
                                     nyears=100,
                                     seed=0,
                                     store_monthly_step=True,
                                     output_filesuffix='_test')

        for gd in gdirs:
            path = gd.get_filepath('model_run', filesuffix='_test')
            # See that we are running ok
            with flowline.FileModel(path) as model:
                vol = model.volume_km3_ts()
                area = model.area_km2_ts()
                length = model.length_m_ts()

                self.assertTrue(np.all(np.isfinite(vol) & (vol != 0.)))
                self.assertTrue(np.all(np.isfinite(area) & (area != 0.)))
                self.assertTrue(np.all(np.isfinite(length) & (length != 0.)))

            ds_diag = gd.get_filepath('model_diagnostics', filesuffix='_test')
            ds_diag = xr.open_dataset(ds_diag)
            df = vol.to_frame('RUN')
            df['DIAG'] = ds_diag.volume_m3.to_series() * 1e-9
            assert_allclose(df.RUN, df.DIAG)
            df = area.to_frame('RUN')
            df['DIAG'] = ds_diag.area_m2.to_series() * 1e-6
            assert_allclose(df.RUN, df.DIAG)
            df = length.to_frame('RUN')
            df['DIAG'] = ds_diag.length_m.to_series()
            assert_allclose(df.RUN, df.DIAG)

        # Test output
        ds = utils.compile_run_output(gdirs, input_filesuffix='_test')
        assert_allclose(ds_diag.volume_m3, ds.volume.sel(rgi_id=gd.rgi_id))
        assert_allclose(ds_diag.area_m2, ds.area.sel(rgi_id=gd.rgi_id))
        assert_allclose(ds_diag.length_m, ds.length.sel(rgi_id=gd.rgi_id))
        df = ds.volume.sel(rgi_id=gd.rgi_id).to_series().to_frame('OUT')
        df['RUN'] = ds_diag.volume_m3.to_series()
        assert_allclose(df.RUN, df.OUT)

        # Compare to statistics
        df = utils.compile_glacier_statistics(gdirs)
        df['y0_vol'] = ds.volume.sel(rgi_id=df.index, time=0) * 1e-9
        df['y0_area'] = ds.area.sel(rgi_id=df.index, time=0) * 1e-6
        df['y0_len'] = ds.length.sel(rgi_id=df.index, time=0)
        assert_allclose(df['rgi_area_km2'], df['y0_area'], 0.06)
        assert_allclose(df['inv_volume_km3'], df['y0_vol'], 0.04)
        assert_allclose(df['main_flowline_length'], df['y0_len'])

        # Calving stuff
        assert ds.isel(rgi_id=0).calving[-1] > 0
        assert ds.isel(rgi_id=0).calving_rate[-1] > 0
        assert ds.isel(rgi_id=0).volume_bsl[-1] == 0
        assert ds.isel(rgi_id=0).volume_bwl[-1] > 0
        assert ds.isel(rgi_id=1).calving[-1] > 0
        assert ds.isel(rgi_id=1).calving_rate[-1] > 0
        assert not np.isfinite(ds.isel(rgi_id=1).volume_bsl[-1])
Example #4
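# Assumed context: import os, numpy as np, xarray as xr, salem;
# from oggm import cfg, workflow; from oggm.core import flowline;
# cfg.initialize() has been called and package_directory is defined above.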
cfg.PATHS['working_dir'] = os.path.join(package_directory, 'data')

# Multiprocessing is a bad idea in a wsgi app
cfg.PARAMS['use_multiprocessing'] = False

# Read the pre-computed model run output
ds = xr.open_dataset(os.path.join(package_directory, 'data', 'run_output_08.nc'))

# Go - initialize working directories
gdirs = workflow.init_glacier_regions()

models = []
point_lons = []
point_lats = []
for gdir in gdirs:
    model = flowline.FileModel(
        gdir.get_filepath('model_run', filesuffix='_08'))
    coords = []
    for fl in model.fls:
        x, y = fl.line.coords.xy
        lon, lat = gdir.grid.ij_to_crs(x, y, salem.wgs84)
        point_lons = np.append(point_lons, lon)
        point_lats = np.append(point_lats, lat)
        coords.append((lon, lat))
    model.coords = coords
    models.append(model)

time_range = [0, 300]

map_lon = 10.87
map_lat = 46.85
map_zoom = 10
Example #5
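# Assumed context: import matplotlib.pyplot as plt; from oggm import cfg, workflow, graphics;
# from oggm.core import flowline; cfg.initialize() has been called, and WORKING_DIR
# holds directories from an earlier run with output_filesuffix='_commitment'.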
cfg.PATHS['working_dir'] = WORKING_DIR

# Initialize from existing directories
# (note that we don't need the RGI file:
# this can be slow sometimes but it works)
gdirs = workflow.init_glacier_regions()

# Plot: we will show the state of the three glaciers at the beginning and at
# the end of the commitment simulation
f, axs = plt.subplots(2, 3, figsize=(14, 6))

for i in range(3):
    ax = axs[0, i]
    gdir = gdirs[i]
    # Use the model output file to replay the stored glacier evolution
    model = flowline.FileModel(
        gdir.get_filepath('model_run', filesuffix='_commitment'))
    graphics.plot_modeloutput_map(gdirs[i],
                                  model=model,
                                  ax=ax,
                                  lonlat_contours_kwargs={'interval': 0})
    ax = axs[1, i]
    model.run_until(200)
    graphics.plot_modeloutput_map(gdirs[i],
                                  model=model,
                                  ax=ax,
                                  lonlat_contours_kwargs={'interval': 0})

plt.tight_layout()
plt.show()
Example #6
        tasks.prepare_for_inversion,  # This is a preprocessing task
        tasks.mass_conservation_inversion,  # This does the actual job
        tasks.filter_inversion_output,  # This smooths the thicknesses at the tongue a little
    ]
    for task in list_talks:
        workflow.execute_entity_task(task, gdirs)

    # plot
    #graphics.plot_inversion(gdirs, figsize=(8, 7))

    # from tutorial
    tasks.init_present_time_glacier(gdir)
    tasks.run_constant_climate(gdir, nyears=100, y0=2000)

    fmod = flowline.FileModel(gdir.get_filepath('model_run'))
    fmod.run_until(0)
    #graphics.plot_modeloutput_map(gdir, model=fmod) # plot

    # get glacier flowline and bed elevation
    # The main flowline is the last one in the flowline list
    fl = fmod.fls[-1]
    i, j = fl.line.xy  # xy flowline on grid
    lons, lats = gdir.grid.ij_to_crs(i, j, crs='EPSG:4326')  # to WGS84

    df_coords = pd.DataFrame(index=fl.dis_on_line * gdir.grid.dx)
    df_coords.index.name = 'Distance along flowline'
    df_coords['lon'] = lons
    df_coords['lat'] = lats
    df_coords['bed_elevation'] = fl.bed_h
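    # df_coords now maps distance along the main flowline (m) to longitude,
    # latitude and bed elevation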