Example No. 1
def redo_all_plots(vdf_dict):
    """
    This function will redo all crossvalidation plots. Time consuming!


    Sometimes its necessary to redo all crossvalidation plots.
    E.g. if you want to change the appearence of a plot.

    Parameters
    ----------
    vdf_dict: Dict of pandas.DataFrames
        containing all necessary information
    """

    for vdf in vdf_dict.values():
        for nr, df in vdf.iterrows():

            utils.mkdir(df['pd'])

            # make the minor plots
            if df.min_maj == 'minor':
                crossval_timeseries(df.file, df.pd)
                crossval_histogram(df.file, df.pd)

            elif df.min_maj == 'major':
                crossval_boxplot(df.file, df.pd)
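For context, a minimal sketch of the kind of input redo_all_plots() expects. The exact structure is an assumption inferred from the fields accessed above ('file', 'pd', 'min_maj'); the paths are made up.

import pandas as pd

# Hypothetical input: one DataFrame per cross-validation flavour,
# one row per stored plot (columns inferred from the loop above).
vdf_dict = {
    'cru_short': pd.DataFrame([
        {'file': 'cv/cru_short_01.csv', 'pd': 'plots/cru_short',
         'min_maj': 'minor'},
        {'file': 'cv/cru_short_all.csv', 'pd': 'plots/cru_short',
         'min_maj': 'major'},
    ]),
}
redo_all_plots(vdf_dict)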
Example No. 2
def init_columbia_eb(dir_name, reset=False):

    from oggm.core import gis, centerlines
    import geopandas as gpd

    # test directory
    testdir = os.path.join(get_test_dir(), dir_name)
    mkdir(testdir, reset=reset)

    # Init
    cfg.initialize()
    cfg.PATHS['working_dir'] = testdir
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['dem_file'] = get_demo_file('dem_Columbia.tif')
    cfg.PARAMS['border'] = 10

    entity = gpd.read_file(get_demo_file('01_rgi60_Columbia.shp')).iloc[0]
    gdir = oggm.GlacierDirectory(entity)
    if gdir.has_file('climate_historical'):
        return gdir

    gis.define_glacier_region(gdir)
    gis.simple_glacier_masks(gdir)
    centerlines.elevation_band_flowline(gdir)
    centerlines.fixed_dx_elevation_band_flowline(gdir)
    centerlines.compute_downstream_line(gdir)
    tasks.process_dummy_cru_file(gdir, seed=0)
    return gdir
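Usage sketch: thanks to the has_file('climate_historical') check, the function is idempotent, so a second call returns the cached directory instead of re-running the preprocessing (the directory name here is arbitrary):

gdir = init_columbia_eb('tmp_columbia_eb')  # first call: full preprocessing
gdir = init_columbia_eb('tmp_columbia_eb')  # second call: returns cached gdir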
Example No. 3
    def setup_cache(self):

        utils.mkdir(self.testdir, reset=True)
        self.cfg_init()

        hef_file = get_demo_file('Hintereisferner_RGI5.shp')
        entity = gpd.read_file(hef_file).iloc[0]

        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)

        tasks.define_glacier_region(gdir, entity=entity)
        tasks.glacier_masks(gdir)
        tasks.compute_centerlines(gdir)
        tasks.initialize_flowlines(gdir)
        tasks.compute_downstream_line(gdir)
        tasks.compute_downstream_bedshape(gdir)
        tasks.catchment_area(gdir)
        tasks.catchment_intersections(gdir)
        tasks.catchment_width_geom(gdir)
        tasks.catchment_width_correction(gdir)
        tasks.process_custom_climate_data(gdir)
        tasks.mu_candidates(gdir)
        mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']
        res = climate.t_star_from_refmb(gdir, mbdf)
        tasks.local_mustar(gdir, tstar=res['t_star'], bias=res['bias'])
        tasks.apparent_mb(gdir)

        tasks.prepare_for_inversion(gdir)
        tasks.mass_conservation_inversion(gdir)

        return gdir
Example No. 4
    def setup_cache(self):

        # raise the benchmark timeout for this expensive cache setup
        setattr(full_workflow.setup_cache, "timeout", 360)

        utils.mkdir(self.testdir, reset=True)
        self.cfg_init()

        entity = gpd.read_file(get_demo_file('01_rgi60_Columbia.shp')).iloc[0]
        gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)

        tasks.define_glacier_region(gdir, entity=entity)
        tasks.glacier_masks(gdir)
        tasks.compute_centerlines(gdir)
        tasks.initialize_flowlines(gdir)
        tasks.compute_downstream_line(gdir)
        tasks.compute_downstream_bedshape(gdir)
        tasks.catchment_area(gdir)
        tasks.catchment_intersections(gdir)
        tasks.catchment_width_geom(gdir)
        tasks.catchment_width_correction(gdir)
        climate.process_dummy_cru_file(gdir, seed=0)

        # Test default k (it overshoots)
        df1 = utils.find_inversion_calving(gdir)

        # Test with smaller k (it doesn't overshoot)
        cfg.PARAMS['k_calving'] = 0.2
        df2 = utils.find_inversion_calving(gdir)

        return (df1.calving_flux.values, df1.mu_star.values,
                df2.calving_flux.values, df2.mu_star.values)
Example No. 5
def test_ice_cap():

    testdir = os.path.join(cfg.PATHS['test_dir'], 'tmp_icecap')
    utils.mkdir(testdir)

    cfg.initialize()
    cfg.PATHS['dem_file'] = get_demo_file('dem_RGI50-05.08389.tif')
    cfg.PARAMS['border'] = 20
    cfg.set_divides_db(get_demo_file('divides_RGI50-05.08389.shp'))

    hef_file = get_demo_file('RGI50-05.08389.shp')
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=True)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.compute_downstream_lines(gdir)
    geometry.initialize_flowlines(gdir)

    # We should have five groups
    lines = gdir.read_pickle('downstream_lines', div_id=0)
    assert len(np.unique(lines.group)) == 5

    # This just checks that it works
    geometry.catchment_area(gdir)
    geometry.catchment_intersections(gdir)
    geometry.catchment_width_geom(gdir)
    geometry.catchment_width_correction(gdir)

    fig, ax = plt.subplots()
    graphics.plot_catchment_width(gdir, ax=ax, add_intersects=True,
                                  add_touches=True)
    fig.tight_layout()
    return fig
Example No. 6
def make_fake_zipdir(dir_path, fakefile=None):
    """Creates a directory with a file in it if asked to, then compresses it"""
    utils.mkdir(dir_path)
    if fakefile:
        touch(os.path.join(dir_path, fakefile))
    shutil.make_archive(dir_path, 'zip', dir_path)
    return dir_path + '.zip'
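shutil.make_archive(dir_path, 'zip', dir_path) writes the archive next to the directory it compresses, hence the dir_path + '.zip' return value. A usage sketch, assuming touch() simply creates an empty file at the given path:

fake_zip = make_fake_zipdir('/tmp/rgi50', fakefile='manifest.txt')
print(fake_zip)  # -> '/tmp/rgi50.zip', containing manifest.txt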
Example No. 7
def test_ice_cap():

    testdir = os.path.join(get_test_dir(), 'tmp_icecap')
    utils.mkdir(testdir, reset=True)

    cfg.initialize()
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['dem_file'] = get_demo_file('dem_RGI50-05.08389.tif')
    cfg.PARAMS['border'] = 60
    cfg.PATHS['working_dir'] = testdir

    df = gpd.read_file(get_demo_file('divides_RGI50-05.08389.shp'))
    df['Area'] = df.Area * 1e-6  # convert from m2 to km2
    df['RGIId'] = ['RGI50-05.08389_d{:02d}'.format(d + 1) for d in df.index]

    gdirs = workflow.init_glacier_regions(df)
    workflow.gis_prepro_tasks(gdirs)

    from salem import mercator_grid, Map
    smap = mercator_grid((gdirs[0].cenlon, gdirs[0].cenlat),
                         extent=[20000, 23000])
    smap = Map(smap)

    fig, ax = plt.subplots()
    graphics.plot_catchment_width(gdirs,
                                  ax=ax,
                                  add_intersects=True,
                                  add_touches=True,
                                  smap=smap)
    fig.tight_layout()
    shutil.rmtree(testdir)
    return fig
Example No. 8
def plot_experiment(gdir, ex_mod, ys, plot_dir):

    x = (np.arange(ex_mod.fls[-1].nx) * ex_mod.fls[-1].dx *
         ex_mod.fls[-1].map_dx)

    fig = plt.figure(figsize=(15, 14))
    grid = plt.GridSpec(2, 1, hspace=0.2, wspace=0.2)
    ax1 = plt.subplot(grid[0, 0])
    ax2 = plt.subplot(grid[1, 0], sharex=ax1)

    if gdir.name != '':
        ax1.set_title(gdir.rgi_id + ':' + gdir.name)
    else:
        ax1.set_title(gdir.rgi_id)

    # plot experiments.py, run until ys
    ex_mod = deepcopy(ex_mod)
    ex_mod.reset_y0(ys)
    ex_mod.run_until(ys)
    # plot the full flowline (the thickness-based cutoff is disabled)
    i = ex_mod.fls[-1].nx
    ax1.plot(x[:i],
             ex_mod.fls[-1].surface_h[:i],
             'k:',
             label=r'$z_{' + str(ys) + '}^{exp}$',
             linewidth=3)
    ax1.plot(x[:i], ex_mod.fls[-1].bed_h[:i], 'k', label=r'$b$', linewidth=3)

    ex_mod.run_until(2000)

    ax2.plot(x[:i],
             ex_mod.fls[-1].surface_h[:i],
             'k:',
             label=r'$z_{2000}^{exp = obs} $',
             linewidth=3)
    ax2.plot(x[:i], ex_mod.fls[-1].bed_h[:i], 'k', label=r'$b$', linewidth=3)

    # add figure names and legends
    add_at(ax1, r"a", loc=3)
    add_at(ax2, r"b", loc=3)

    ax1.legend(loc=1)
    ax2.legend(loc=1)

    ax1.set_ylabel('Altitude (m)')
    ax1.set_xlabel('Distance along the main flowline (m)')
    ax2.set_ylabel('Altitude (m)')
    ax2.set_xlabel('Distance along the main flowline (m)')

    ax1.tick_params(axis='both', which='major')
    ax2.tick_params(axis='both', which='major')

    plot_dir = os.path.join(plot_dir, '00_experiment')
    utils.mkdir(plot_dir)
    fig_name = 'experiment_' + str(ys) + '_' + gdir.rgi_id
    plt.savefig(os.path.join(plot_dir, fig_name + '.pdf'), dpi=300)
    plt.savefig(os.path.join(plot_dir, fig_name + '.png'), dpi=300)
    #plt.show()
    plt.close()
Example No. 9
    def setUp(self):
        self.dldir = os.path.join(get_test_dir(), 'tmp_download')
        utils.mkdir(self.dldir)
        cfg.initialize()
        cfg.PATHS['dl_cache_dir'] = os.path.join(self.dldir, 'dl_cache')
        cfg.PATHS['working_dir'] = os.path.join(self.dldir, 'wd')
        cfg.PATHS['tmp_dir'] = os.path.join(self.dldir, 'extract')
        self.reset_dir()
Example No. 10
def test_coxe():

    testdir = os.path.join(get_test_dir(), 'tmp_coxe')
    utils.mkdir(testdir, reset=True)

    # Init
    cfg.initialize()
    cfg.PARAMS['use_intersects'] = False
    cfg.PATHS['dem_file'] = get_demo_file('dem_RGI50-01.10299.tif')
    cfg.PARAMS['border'] = 40
    cfg.PARAMS['clip_tidewater_border'] = False
    cfg.PARAMS['use_multiple_flowlines'] = False
    cfg.PARAMS['use_kcalving_for_inversion'] = True
    cfg.PARAMS['use_kcalving_for_run'] = True
    cfg.PARAMS['trapezoid_lambdas'] = 1

    hef_file = get_demo_file('rgi_RGI50-01.10299.shp')
    entity = gpd.read_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=True)
    gis.define_glacier_region(gdir)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.initialize_flowlines(gdir)
    centerlines.compute_downstream_line(gdir)
    centerlines.compute_downstream_bedshape(gdir)
    centerlines.catchment_area(gdir)
    centerlines.catchment_intersections(gdir)
    centerlines.catchment_width_geom(gdir)
    centerlines.catchment_width_correction(gdir)
    climate.apparent_mb_from_linear_mb(gdir)
    inversion.prepare_for_inversion(gdir)
    inversion.mass_conservation_inversion(gdir)
    inversion.filter_inversion_output(gdir)

    flowline.init_present_time_glacier(gdir)

    fls = gdir.read_pickle('model_flowlines')

    p = gdir.read_pickle('linear_mb_params')
    mb_mod = massbalance.LinearMassBalance(ela_h=p['ela_h'], grad=p['grad'])
    mb_mod.temp_bias = -0.3
    model = flowline.FluxBasedModel(fls,
                                    mb_model=mb_mod,
                                    y0=0,
                                    inplace=True,
                                    is_tidewater=True)

    # run
    model.run_until(200)
    assert model.calving_m3_since_y0 > 0

    fig, ax = plt.subplots()
    graphics.plot_modeloutput_map(gdir, ax=ax, model=model)
    fig.tight_layout()
    shutil.rmtree(testdir)
    return fig
Example No. 11
def idealized_experiment(use_experiment_glaciers=None,
                         inversion_settings_all=None,
                         working_dir='',
                         output_folder='',
                         params_file=None,
                         override_params=None,
                         logging_level='WORKFLOW'):
    # Local paths
    if override_params is None:
        override_params = {}

    utils.mkdir(working_dir)
    override_params['working_dir'] = working_dir

    utils.mkdir(output_folder)

    cfg.initialize(file=params_file,
                   params=override_params,
                   logging_level=logging_level,
                   future=True)

    print('Create glacier directories with idealized glaciers:')
    # Size of the map around the glacier.
    if cfg.PARAMS['border'] != 160:
        msg = (f"Border is {cfg.PARAMS['border']} but experiments was "
               f"created with border=160!")
        warnings.warn(msg)
    # Degree of processing level.
    from_prepro_level = 3
    # URL of the preprocessed gdirs
    # we use elevation bands flowlines here
    base_url = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.4/' \
               'L3-L5_files/CRU/elev_bands/qc3/pcp2.5/no_match/'

    gdirs = create_idealized_experiments(
        use_experiment_glaciers,
        prepro_border=cfg.PARAMS['border'],
        from_prepro_level=from_prepro_level,
        base_url=base_url,
    )

    print('Finished creation of directories.')

    print('Start experiments:')

    all_experiments = []
    for inv_setting in inversion_settings_all:
        for gdir in gdirs:
            all_experiments.append((gdir,
                                    dict(inversion_settings=inv_setting,
                                         output_folder=output_folder)))

    workflow.execute_entity_task(conduct_combine_inversion, all_experiments)

    print('Experiments finished!')
Example No. 12
def find_residual(gdir, a=-2000, b=2000):

    try:

        max_it = 15
        i = 0
        bounds = [a, b]

        df = pd.DataFrame()

        fls = gdir.read_pickle('model_flowlines')
        mod = FluxBasedModel(flowlines=fls)

        while i < max_it:
            bias = round((bounds[0] + bounds[1]) / 2, 1)
            ex_mod2 = _run_experiment(gdir, bias)
            fit = fitness_function(ex_mod2, mod)
            df = df.append(pd.Series({'bias': bias, 'fitness': fit}),
                           ignore_index=True)

            if bounds[1] - bounds[0] <= 1:
                break
            elif ex_mod2.area_km2 > mod.area_km2:
                bounds[0] = bias
            else:
                bounds[1] = bias
            i += 1

        # best bias found
        bias = df.iloc[df.fitness.idxmin()].bias
        rp = gdir.get_filepath('model_run', filesuffix='_advanced_experiment_' + str(bias))
        model = FileModel(rp)
        model.run_until(2000)

        rp = gdir.get_filepath('model_run', filesuffix='_advanced_experiment_' + str(0.0))
        ex_mod = FileModel(rp)
        ex_mod.run_until(2000)

        plt.figure(figsize=(15, 10))
        plt.plot(model.fls[-1].surface_h, 'r', label='best')
        plt.plot(mod.fls[-1].surface_h, 'orange', label='original')
        plt.plot(ex_mod.fls[-1].surface_h, 'r:', label='old experiment')
        plt.plot(model.fls[-1].bed_h, 'k', label='bed')
        plt.legend()
        utils.mkdir(os.path.join(cfg.PATHS['plot_dir'], 'bias_test'))
        plt.savefig(os.path.join(cfg.PATHS['plot_dir'], 'bias_test',
                                 gdir.rgi_id + '.png'), dpi=200)
        plt.show()
        diff = mod.area_km2 - model.area_km2_ts()[2000]
        model.reset_y0(1865)

        series = pd.Series({'rgi_id': gdir.rgi_id, 'bias': bias,
                            'iterations': i, 'fitness': fit,
                            'area_diff': diff, 'model': model})
    except Exception:
        series = pd.Series({'rgi_id': gdir.rgi_id})

    return series
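The while loop above is a plain bisection on the mass-balance bias: a modelled area larger than the target means the bias must grow, so the lower bound moves up, and vice versa. A self-contained sketch of the same search pattern, with a toy stand-in for the expensive _run_experiment() call:

def bisect_bias(area_of_bias, target_area, lo=-2000.0, hi=2000.0,
                max_it=15, tol=1.0):
    # Same scheme as in find_residual: halve the interval until it is
    # narrower than tol or max_it iterations are reached.
    bias = None
    for _ in range(max_it):
        bias = round((lo + hi) / 2, 1)
        if hi - lo <= tol:
            break
        if area_of_bias(bias) > target_area:
            lo = bias
        else:
            hi = bias
    return bias

# Toy example: area shrinks linearly with bias; the root is at -500.
print(bisect_bias(lambda b: 10.0 - 0.002 * b, target_area=11.0))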
Example No. 13
def get_test_dir():

    s = get_ident()
    out = os.path.join(cfg.PATHS['test_dir'], s)
    mkdir(out)

    # If the ident is new, remove all other dirs to spare space
    for d in os.listdir(cfg.PATHS['test_dir']):
        if d and d != s:
            shutil.rmtree(os.path.join(cfg.PATHS['test_dir'], d))
    return out
Example No. 14
    def test_shapefile_output(self):

        gdirs = up_to_climate(use_mp=True)

        fpath = os.path.join(_TEST_DIR, 'centerlines.shp')
        write_centerlines_to_shape(gdirs, path=fpath)

        import salem
        shp = salem.read_shapefile(fpath)
        self.assertTrue(shp is not None)
        shp = shp.loc[shp.RGIID == 'RGI60-11.00897']
        self.assertEqual(len(shp), 3)
        self.assertEqual(shp.loc[shp.LE_SEGMENT.idxmax()].MAIN, 1)

        fpath = os.path.join(_TEST_DIR, 'flowlines.shp')
        write_centerlines_to_shape(gdirs, path=fpath, flowlines_output=True)
        shp_f = salem.read_shapefile(fpath)
        self.assertTrue(shp_f is not None)
        shp_f = shp_f.loc[shp_f.RGIID == 'RGI60-11.00897']
        self.assertEqual(len(shp_f), 3)
        self.assertEqual(shp_f.loc[shp_f.LE_SEGMENT.idxmax()].MAIN, 1)
        # The flowline is cut, and therefore shorter
        assert shp_f.LE_SEGMENT.max() < shp.LE_SEGMENT.max() * 0.8

        fpath = os.path.join(_TEST_DIR, 'widths_geom.shp')
        write_centerlines_to_shape(gdirs,
                                   path=fpath,
                                   geometrical_widths_output=True)
        # Salem can't read it
        shp_w = gpd.read_file(fpath)
        self.assertTrue(shp_w is not None)
        shp_w = shp_w.loc[shp_w.RGIID == 'RGI60-11.00897']
        self.assertEqual(len(shp_w), 90)

        fpath = os.path.join(_TEST_DIR, 'widths_corr.shp')
        write_centerlines_to_shape(gdirs,
                                   path=fpath,
                                   corrected_widths_output=True)
        # Salem can't read it
        shp_w = gpd.read_file(fpath)
        self.assertTrue(shp_w is not None)
        shp_w = shp_w.loc[shp_w.RGIID == 'RGI60-11.00897']
        self.assertEqual(len(shp_w), 90)

        # Test that the workflow still works if one glacier is broken
        base_dir = os.path.join(cfg.PATHS['working_dir'], 'dummy_pergla')
        utils.mkdir(base_dir, reset=True)
        gdirs = workflow.execute_entity_task(utils.copy_to_basedir,
                                             gdirs,
                                             base_dir=base_dir,
                                             setup='all')
        os.remove(gdirs[0].get_filepath('centerlines'))
        cfg.PARAMS['continue_on_error'] = True
        write_centerlines_to_shape(gdirs)
Example No. 15
def test_coxe():

    testdir = os.path.join(cfg.PATHS['test_dir'], 'tmp_coxe')
    utils.mkdir(testdir)

    # Init
    cfg.initialize()
    cfg.PATHS['dem_file'] = get_demo_file('dem_RGI50-01.10299.tif')
    cfg.PARAMS['border'] = 40
    cfg.PARAMS['use_multiple_flowlines'] = False

    hef_file = get_demo_file('rgi_RGI50-01.10299.shp')
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=testdir, reset=True)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.compute_downstream_lines(gdir)
    geometry.initialize_flowlines(gdir)

    # Just check if the rest runs
    centerlines.compute_downstream_bedshape(gdir)
    geometry.catchment_area(gdir)
    geometry.catchment_intersections(gdir)
    geometry.catchment_width_geom(gdir)
    geometry.catchment_width_correction(gdir)
    climate.apparent_mb_from_linear_mb(gdir)
    inversion.prepare_for_inversion(gdir)
    inversion.volume_inversion(gdir, use_cfg_params={'glen_a': cfg.A, 'fs': 0})
    inversion.filter_inversion_output(gdir)

    flowline.init_present_time_glacier(gdir)

    fls = gdir.read_pickle('model_flowlines')

    p = gdir.read_pickle('linear_mb_params')
    mb_mod = massbalance.LinearMassBalanceModel(ela_h=p['ela_h'],
                                                grad=p['grad'])
    mb_mod.temp_bias = -0.3
    model = flowline.FluxBasedModel(fls,
                                    mb_model=mb_mod,
                                    y0=0,
                                    is_tidewater=True)

    # run
    model.run_until(200)
    assert model.calving_m3_since_y0 > 0

    fig, ax = plt.subplots()
    graphics.plot_modeloutput_map(gdir, ax=ax, model=model)
    fig.tight_layout()
    return fig
Example No. 16
def website_main():
    # setup jinja
    file_loader = FileSystemLoader(mbcfg.PATHS['jinjadir'])
    env = Environment(loader=file_loader)

    # make a catalogue from all stored versions first. If this fails: stop!
    vdf = catalog_storaged_files()

    # clean all potential old html files
    files = glob.glob(mbcfg.PATHS['webroot'] + '/**/*.html', recursive=True)
    for fl in files:
        os.remove(fl)

    # copy logos
    imgpth = os.path.join(mbcfg.PATHS['webroot'], 'img')
    logo = 'oggm_s_alpha.png'
    utils.mkdir(imgpth)
    copyfile(os.path.join(os.path.dirname(os.path.abspath(__file__)),
                          'favicon.ico'),
             os.path.join(imgpth, 'favicon.ico'))
    copyfile(os.path.join(os.path.dirname(os.path.abspath(__file__)), logo),
             os.path.join(imgpth, logo))

    # make a dictionary with the latest versions for linking
    nbpaths = {'webroot': 'index.html',
               'icon': 'img/favicon.ico',
               'logo': 'img/%s' % logo}
    for vers in vdf.keys():
        try:
            vpath = vdf[vers].iloc[-1].version
        except IndexError:
            vpath = ''
        nbpaths[vers] = os.path.join(vpath, '%s.html' % vers)

    # create index page
    create_index(env, nbpaths)

    if mbcfg.PARAMS['redo_all_plots']:
        redo_all_plots(vdf)

    # split all available files and process separately:
    # global reference glaciers, short cross-validation
    create_minor_website(env, vdf['cru_short'], 'cru_short.html', nbpaths)
    # global reference glaciers, extended cross-validation
    create_major_website(env, vdf['cru_extended'], 'cru_extended.html',
                         nbpaths)
    # HISTALP reference glaciers, short cross-validation
    create_minor_website(env, vdf['histalp_short'], 'histalp_short.html',
                         nbpaths)
    # HISTALP reference glaciers, extended cross-validation
    create_major_website(env, vdf['histalp_extended'], 'histalp_extended.html',
                         nbpaths)
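For reference, the Jinja2 pattern used at the top of website_main() boils down to the following (template name and variables here are illustrative):

from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader('templates'))
template = env.get_template('index.html')
html = template.render(nbpaths={'webroot': 'index.html'})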
Example No. 17
def get_mean_temps_2k(rgi, return_prcp):
    from oggm import cfg, utils, workflow, tasks
    from oggm.core.massbalance import PastMassBalance

    # Initialize OGGM
    cfg.initialize()
    wd = utils.gettempdir(reset=True)
    cfg.PATHS['working_dir'] = wd
    utils.mkdir(wd, reset=True)
    cfg.PARAMS['baseline_climate'] = 'HISTALP'
    # and set standard histalp values
    cfg.PARAMS['temp_melt'] = -1.75
    cfg.PARAMS['prcp_scaling_factor'] = 1.75

    gdir = workflow.init_glacier_regions(rgidf=rgi.split('_')[0],
                                         from_prepro_level=3,
                                         prepro_border=10)[0]
    # run histalp climate on glacier!
    tasks.process_histalp_data(gdir)

    f = gdir.get_filepath('climate_historical')
    with utils.ncDataset(f) as nc:
        refhgt = nc.ref_hgt

    mb = PastMassBalance(gdir, check_calib_params=False)

    df = pd.DataFrame()
    df2 = pd.DataFrame()

    for y in np.arange(1870, 2015):
        for i in np.arange(9, 12):
            flyear = utils.date_to_floatyear(y, i)
            tmp = mb.get_monthly_climate([refhgt], flyear)[0]
            df.loc[y, i] = tmp.mean()

        if return_prcp:
            for i in np.arange(3, 6):
                flyear = utils.date_to_floatyear(y, i)
                pcp = mb.get_monthly_climate([refhgt], flyear)[3]
                df2.loc[y, i] = pcp.mean()

    t99 = df.loc[1984:2014, :].mean().mean()
    t85 = df.loc[1870:1900, :].mean().mean()
    t2k = df.loc[1900:2000, :].mean().mean()

    if return_prcp:
        p99 = df2.loc[1984:2014, :].mean().mean()
        p85 = df2.loc[1870:1900, :].mean().mean()
        p2k = df2.loc[1900:2000, :].mean().mean()
        return t85, t99, t2k, p85, p99, p2k

    return t85, t99, t2k
Example No. 19
def get_test_dir():

    s = get_ident()
    out = os.path.join(cfg.PATHS['test_dir'], s)
    if 'PYTEST_XDIST_WORKER' in os.environ:
        out = os.path.join(out, os.environ.get('PYTEST_XDIST_WORKER'))
    mkdir(out)

    # If the ident is new, remove all other dirs to spare space
    for d in os.listdir(cfg.PATHS['test_dir']):
        if d and d != s:
            shutil.rmtree(os.path.join(cfg.PATHS['test_dir'], d))
    return out
Example No. 21
    def setUp(self):
        self.dldir = os.path.join(get_test_dir(), 'tmp_download')
        utils.mkdir(self.dldir)

        # Get the path to the file before we mess around
        self.dem3_testfile = utils.get_demo_file('T10.zip')

        cfg.initialize()
        cfg.PATHS['dl_cache_dir'] = os.path.join(self.dldir, 'dl_cache')
        cfg.PATHS['working_dir'] = os.path.join(self.dldir, 'wd')
        cfg.PATHS['tmp_dir'] = os.path.join(self.dldir, 'extract')
        cfg.PATHS['rgi_dir'] = os.path.join(self.dldir, 'rgi_test')
        cfg.PATHS['cru_dir'] = os.path.join(self.dldir, 'cru_test')
        self.reset_dir()
Example No. 22
    def test_rgi_intersects(self):

        # Make a fake RGI file
        rgi_dir = os.path.join(self.dldir, 'rgi50')
        utils.mkdir(rgi_dir)
        make_fake_zipdir(os.path.join(rgi_dir, 'RGI_V5_Intersects'),
                         fakefile='Intersects_OGGM_Manifest.txt')
        make_fake_zipdir(os.path.join(rgi_dir, 'RGI_V5_Intersects',
                                      '11_rgi50_CentralEurope'),
                         fakefile='intersects_11_rgi50_CentralEurope.shp')
        make_fake_zipdir(os.path.join(rgi_dir, 'RGI_V5_Intersects',
                                      '00_rgi50_AllRegs'),
                         fakefile='intersects_rgi50_AllRegs.shp')
        rgi_f = make_fake_zipdir(rgi_dir)

        def down_check(url, cache_name=None, reset=False):
            expected = ('https://cluster.klima.uni-bremen.de/~fmaussion/rgi/' +
                        'RGI_V5_Intersects.zip')
            self.assertEqual(url, expected)
            return rgi_f

        with FakeDownloadManager('_progress_urlretrieve', down_check):
            rgi = utils.get_rgi_intersects_dir()
            utils.get_rgi_intersects_region_file('11', version='5')
            utils.get_rgi_intersects_region_file('00', version='5')

        assert os.path.isdir(rgi)
        assert os.path.exists(os.path.join(rgi,
                                           'Intersects_OGGM_Manifest.txt'))

        # Make a fake RGI file
        rgi_dir = os.path.join(self.dldir, 'rgi60')
        utils.mkdir(rgi_dir)
        make_fake_zipdir(os.path.join(rgi_dir, 'RGI_V6_Intersects'),
                         fakefile='Intersects_OGGM_Manifest.txt')
        rgi_f = make_fake_zipdir(rgi_dir)

        def down_check(url, cache_name=None, reset=False):
            expected = ('https://cluster.klima.uni-bremen.de/~fmaussion/rgi/' +
                        'RGI_V6_Intersects.zip')
            self.assertEqual(url, expected)
            return rgi_f

        with FakeDownloadManager('_progress_urlretrieve', down_check):
            rgi = utils.get_rgi_intersects_dir(version='6')

        assert os.path.isdir(rgi)
        assert os.path.exists(os.path.join(rgi,
                                           'Intersects_OGGM_Manifest.txt'))
Example No. 23
def test_chhota_shigri():

    testdir = os.path.join(cfg.PATHS['test_dir'], 'tmp_chhota')
    utils.mkdir(testdir)

    # Init
    cfg.initialize()
    cfg.PATHS['dem_file'] = get_demo_file('dem_chhota_shigri.tif')
    cfg.PARAMS['border'] = 60
    cfg.set_divides_db(get_demo_file('divides_RGI50-14.15990.shp'))

    hef_file = get_demo_file('RGI50-14.15990.shp')
    entity = gpd.GeoDataFrame.from_file(hef_file).iloc[0]

    gdir = oggm.GlacierDirectory(entity, base_dir=testdir)
    gis.define_glacier_region(gdir, entity=entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.compute_downstream_lines(gdir)
    geometry.initialize_flowlines(gdir)

    # We should have two groups
    lines = gdir.read_pickle('downstream_lines', div_id=0)
    assert len(np.unique(lines.group)) == 2

    # Just check if the rest runs
    centerlines.compute_downstream_bedshape(gdir)
    geometry.catchment_area(gdir)
    geometry.catchment_intersections(gdir)
    geometry.catchment_width_geom(gdir)
    geometry.catchment_width_correction(gdir)
    climate.apparent_mb_from_linear_mb(gdir)
    inversion.prepare_for_inversion(gdir)
    inversion.volume_inversion(gdir, use_cfg_params={'glen_a': cfg.A,
                                                     'fs': 0})
    inversion.filter_inversion_output(gdir)

    flowline.init_present_time_glacier(gdir)

    fls = gdir.read_pickle('model_flowlines')
    for fl in fls:
        fl.thick = np.clip(fl.thick, 100, 1000)
    model = flowline.FlowlineModel(fls)

    fig, ax = plt.subplots()
    graphics.plot_modeloutput_map(gdir, ax=ax, model=model)
    fig.tight_layout()
    return fig
Example No. 24
def single_node_example(run_for_test=False):
    y0 = 2008
    nyears = 100
    halfsize = 0
    mean_years = (2002, 2012)
    mtypes = ['scenew_ctl_3', 'sce_ctl_3']
    outpath = utils.mkdir(os.path.join(cluster_dir, 'Climate_3'))
    gdirs = pre_process_tasks(run_for_test=run_for_test)
    workflow.execute_entity_task(run_my_random_climate, gdirs, nyears=nyears,
                                 y0=y0, seed=1, halfsize=halfsize,
                                 output_filesuffix=f'_origin_hf{halfsize}',
                                 mean_years=mean_years)
    for mtype in mtypes:
        fpath_prcp_diff = os.path.join(data_dir, f'Precip_diff_{mtype}.nc')
        fpath_temp_diff = os.path.join(data_dir, f'T2m_diff_{mtype}.nc')
        workflow.execute_entity_task(run_my_random_climate, gdirs,
                                     nyears=nyears, y0=y0, seed=1,
                                     halfsize=halfsize,
                                     output_filesuffix=f'_exper_{mtype}_hf{halfsize}',
                                     fpath_temp_diff=fpath_temp_diff,
                                     fpath_prcp_diff=fpath_prcp_diff,
                                     mean_years=mean_years)

    output_list = []
    suffixes = [f'_origin_hf{halfsize}', f'_exper_{mtypes[0]}_hf{halfsize}',
                f'_exper_{mtypes[1]}_hf{halfsize}']
    for suffix in suffixes:
        path = os.path.join(outpath, 'result'+suffix+'.nc')
        output_list.append(utils.compile_run_output(gdirs, input_filesuffix=suffix, 
                                                    path=path, use_compression=True))
    
    # TODO: Test!
    a = output_list[0].volume.values
    print(a[-1, 2])
Example No. 26
def test_dir():
    """Provide a reference to the test directory for the entire test session.

    Named after the current git revision. As a session-scoped fixture,
    this will only be created once and then injected into each test that
    depends on it.
    """
    s = get_ident()
    out = os.path.join(cfg.PATHS['test_dir'], s)
    if 'PYTEST_XDIST_WORKER' in os.environ:
        out = os.path.join(out, os.environ.get('PYTEST_XDIST_WORKER'))
    mkdir(out)

    # If the ident is new, remove all other dirs to spare space
    for d in os.listdir(cfg.PATHS['test_dir']):
        if d and d != s:
            shutil.rmtree(os.path.join(cfg.PATHS['test_dir'], d))
    return out
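The docstring above describes a session-scoped pytest fixture; the decorator itself is not part of the snippet, but it would presumably look like this:

import pytest

@pytest.fixture(scope='session')
def test_dir():
    ...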
Example No. 27
def run_with_job_array(y0, nyears, halfsize, mtype, prcp_prefix=None,
                       temp_prefix=None, run_for_test=False, mean_years=None,
                       output_dir=None, output_filesuffix=None):

    if output_dir is None:
        outpath = utils.mkdir(cluster_dir, reset=False)
    else:
        outpath = utils.mkdir(os.path.join(cluster_dir, output_dir),
                              reset=False)
    gdirs = pre_process_tasks(run_for_test=run_for_test)
    if mtype == 'origin':
        if output_filesuffix is None:
            output_filesuffix = f'_origin_hf{halfsize}'
        workflow.execute_entity_task(run_my_random_climate, gdirs,
                                     nyears=nyears, y0=y0, seed=1,
                                     halfsize=halfsize,
                                     output_filesuffix=output_filesuffix,
                                     mean_years=mean_years)
    else:
        if CLIMATE_DATA == '2':
            mtype = '_' + mtype
        if prcp_prefix:
            fpath_prcp_diff = os.path.join(data_dir, f'{prcp_prefix}{mtype}.nc')
        else:
            fpath_prcp_diff = None

        if temp_prefix:
            fpath_temp_diff = os.path.join(data_dir, f'{temp_prefix}{mtype}.nc')
        else:
            fpath_temp_diff = None

        if output_filesuffix is None:
            output_filesuffix = f'_exper_{mtype}_hf{halfsize}'
        workflow.execute_entity_task(run_my_random_climate, gdirs, nyears=nyears,
                                     y0=y0, seed=1, halfsize=halfsize,
                                     output_filesuffix=output_filesuffix,
                                     mean_years=mean_years,
                                     fpath_temp_diff=fpath_temp_diff,
                                     fpath_prcp_diff=fpath_prcp_diff)

    ds = utils.compile_run_output(gdirs, input_filesuffix=output_filesuffix,
                                  path=False)
    # load before writing, to avoid the cluster stall problem reported in:
    # https://github.com/OGGM/oggm/pull/1122 and
    # https://github.com/pydata/xarray/issues/4710
    print(f"Save result{output_filesuffix}.nc")

    ds.load().to_netcdf(path=os.path.join(outpath, 'result'+output_filesuffix+'.nc'))
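The .load() call is the important part of the workaround referenced in the comment: it forces the (possibly dask-backed) dataset into memory before writing. A minimal sketch of the same pattern with a generic dataset:

import numpy as np
import xarray as xr

ds = xr.Dataset({'volume': ('time', np.arange(3.0))})
ds.load().to_netcdf('result_example.nc')  # compute first, then write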
Example No. 28
File: tasks.py Project: OGGM/g2ti
def oggm_to_g2ti(gdir, dirname='final'):
    """From an OGGM gdir to a G2TI tiff.

    Parameters
    ----------
    gdir : GlacierDirectory
        the glacier directory to process
    dirname : str
        name of the output folder within the working directory
    """

    # Get the data
    grids_file = gdir.get_filepath('gridded_data')
    with netCDF4.Dataset(grids_file) as nc:
        with warnings.catch_warnings():
            # https://github.com/Unidata/netcdf4-python/issues/766
            warnings.filterwarnings("ignore", category=RuntimeWarning)
            thick = nc.variables['distributed_thickness'][:]

    dx = gdir.grid.dx
    vol = np.nansum(thick * dx**2)

    tpl_f = os.path.join(g2ti.geometry_dir, gdir.rgi_id[:8], gdir.rgi_id,
                         'dem.tif')
    dst = salem.GeoTiff(tpl_f)

    thick[~np.isfinite(thick)] = 0.

    dst_thick = dst.grid.map_gridded_data(thick,
                                          grid=gdir.grid,
                                          interp='spline')
    dst_thick[~np.isfinite(dst_thick)] = 0.

    # Conserve volume
    dx = dst.grid.dx
    dst_thick *= vol / np.nansum(dst_thick * dx**2)
    if not np.isclose(vol / np.nansum(dst_thick * dx**2), 1, atol=0.2):
        raise RuntimeError('Something went wrong in reproj.')

    with rasterio.open(tpl_f) as orig:
        # Set up profile for writing output
        profile = orig.profile

    ft = os.path.join(cfg.PATHS['working_dir'], dirname,
                      'RGI60-{}'.format(gdir.rgi_region))
    utils.mkdir(ft)
    ft = os.path.join(ft, 'thickness_{}.tif'.format(gdir.rgi_id))

    with rasterio.open(ft, 'w', **profile) as dest:
        dest.write(dst_thick.astype(np.float32).clip(0), 1)
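The volume-conserving step above rescales the reprojected thickness so that the total ice volume on the target grid matches the source grid. A worked sketch on toy arrays (the grid spacings are made up):

import numpy as np

thick_src = np.full((4, 4), 100.0)            # m of ice on a 50 m grid
vol = np.nansum(thick_src * 50.0 ** 2)        # = 4e6 m3

thick_dst = np.full((8, 8), 20.0)             # reprojected onto a 25 m grid
thick_dst *= vol / np.nansum(thick_dst * 25.0 ** 2)
assert np.isclose(np.nansum(thick_dst * 25.0 ** 2), vol)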
Example No. 29
    def test_rgi(self):

        # Make a fake RGI file
        rgi_dir = os.path.join(self.dldir, 'rgi50')
        utils.mkdir(rgi_dir)
        make_fake_zipdir(os.path.join(rgi_dir, '01_rgi50_Region'),
                         fakefile='test.txt')
        rgi_f = make_fake_zipdir(rgi_dir, fakefile='000_rgi50_manifest.txt')

        def down_check(url, cache_name=None, reset=False):
            expected = 'http://www.glims.org/RGI/rgi50_files/rgi50.zip'
            self.assertEqual(url, expected)
            return rgi_f

        with FakeDownloadManager('_progress_urlretrieve', down_check):
            rgi = utils.get_rgi_dir()

        assert os.path.isdir(rgi)
        assert os.path.exists(os.path.join(rgi, '000_rgi50_manifest.txt'))
        assert os.path.exists(os.path.join(rgi, '01_rgi50_Region', 'test.txt'))

        # Make a fake RGI file
        rgi_dir = os.path.join(self.dldir, 'rgi60')
        utils.mkdir(rgi_dir)
        make_fake_zipdir(os.path.join(rgi_dir, '01_rgi60_Region'),
                         fakefile='01_rgi60_Region.shp')
        rgi_f = make_fake_zipdir(rgi_dir, fakefile='000_rgi60_manifest.txt')

        def down_check(url, cache_name=None, reset=False):
            expected = 'http://www.glims.org/RGI/rgi60_files/00_rgi60.zip'
            self.assertEqual(url, expected)
            return rgi_f

        with FakeDownloadManager('_progress_urlretrieve', down_check):
            rgi = utils.get_rgi_dir(version='6')

        assert os.path.isdir(rgi)
        assert os.path.exists(os.path.join(rgi, '000_rgi60_manifest.txt'))
        assert os.path.exists(
            os.path.join(rgi, '01_rgi60_Region', '01_rgi60_Region.shp'))

        with FakeDownloadManager('_progress_urlretrieve', down_check):
            rgi_f = utils.get_rgi_region_file('01', version='6')

        assert os.path.exists(rgi_f)
        assert '01_rgi60_Region.shp' in rgi_f
Example No. 30
def merge_intersects():
    # Gather the per-region intersect shapefiles
    fp = 'intersects_*_rgi' + rgi_version + '0_*.shp'
    shps = list(glob(os.path.join(OUTDIR_INTERSECTS, "*", fp)))
    assert len(shps) == 19
    out = []
    for sh in sorted(shps):
        sh = gpd.read_file(sh)
        out.append(sh)
    # Make a new dataframe of those
    inter = pd.concat(out)
    inter.crs = sh.crs  # for georeferencing

    odir = '00_rgi' + rgi_version + '0_AllRegs'
    odir = os.path.join(OUTDIR_INTERSECTS, odir)
    mkdir(odir)
    ofile = os.path.join(odir, 'intersects_rgi' + rgi_version + '0_AllRegs.shp')
    inter.to_file(ofile)
Example No. 31
def _rename_dem_folder(gdir, source=''):
    """Put the DEM files in a subfolder of the gdir.

    Parameters
    ----------
    gdir : GlacierDirectory
    source : str
        the DEM source
    """

    # open tif-file to check if it's worth it
    dem_f = gdir.get_filepath('dem')
    try:
        dem = gis.read_geotiff_dem(gdir)
    except IOError:
        # Error reading file, no problem - still, delete the file if needed
        if os.path.exists(dem_f):
            os.remove(dem_f)
        gdir.log('{},DEM SOURCE,{}'.format(gdir.rgi_id, source),
                 err=InvalidDEMError('File does not exist'))
        return

    # Check the DEM
    isfinite = np.isfinite(dem)
    if np.all(~isfinite) or (np.min(dem) == np.max(dem)):
        # Remove the file and return
        if os.path.exists(dem_f):
            os.remove(dem_f)
        gdir.log('{},DEM SOURCE,{}'.format(gdir.rgi_id, source),
                 err=InvalidDEMError('DEM does not contain more than one '
                                     'valid value.'))
        return

    # Create a source dir and move the files
    out = os.path.join(gdir.dir, source)
    utils.mkdir(out)
    for fname in ['dem', 'dem_source']:
        f = gdir.get_filepath(fname)
        os.rename(f, os.path.join(out, os.path.basename(f)))

    # log SUCCESS for this DEM source
    gdir.log('{},DEM SOURCE,{}'.format(gdir.rgi_id, source))
Example No. 32
def find_possible_glaciers(gdir, y0, ye, n):

    path = os.path.join(gdir.dir, 'result' + str(y0) + '.pkl')

    # if results are already there, don't run it again (the check that the
    # number of candidates is the same is currently disabled)
    if os.path.isfile(path):
        results = pd.read_pickle(path, compression='gzip')
        return results

    # 1. Generation of possible glacier states
    #    - Run random climate over 400 years with different temperature biases
    random_list = generation(gdir, y0)

    # 2. Identification of glacier candidates
    #    - Determine t_stag (beginning of the stagnation period)
    #    - Classification by volume (n equidistantly distributed classes)
    #    - Select one candidate per class
    candidate_list = identification(gdir, random_list, ys=y0, ye=ye, n=n)

    # 3. Evaluation
    #    - Run each candidate forward from y0 to ye
    #    - Evaluate candidates based on the fitness function
    #    - Save all models in pd.DataFrame and write pickle
    #    - copy all model_run files to tarfile
    results = evaluation(gdir, candidate_list, y0, ye)

    # move all model_run* files from year y0 to a new directory --> avoids
    # creating thousands of files in gdir.dir
    utils.mkdir(os.path.join(gdir.dir, str(y0)), reset=False)
    for file in os.listdir(gdir.dir):
        if file.startswith('model_run' + (str(y0))):
            os.rename(os.path.join(gdir.dir, file),
                      os.path.join(gdir.dir, str(y0), file))
        elif file.startswith('model_diagnostics' + (str(y0))):
            os.remove(os.path.join(gdir.dir, file))

    return results
Example No. 33
    def test_rgi_intersects(self):

        # Make a fake RGI file
        rgi_dir = os.path.join(self.dldir, 'rgi50')
        utils.mkdir(rgi_dir)
        make_fake_zipdir(os.path.join(rgi_dir, 'RGI_V5_Intersects'),
                         fakefile='Intersects_OGGM_Manifest.txt')
        rgi_f = make_fake_zipdir(rgi_dir)

        def down_check(url, cache_name=None, reset=False):
            expected = ('https://www.dropbox.com/s/y73sdxygdiq7whv/' +
                        'RGI_V5_Intersects.zip?dl=1')
            self.assertEqual(url, expected)
            return rgi_f

        with FakeDownloadManager('_progress_urlretrieve', down_check):
            rgi = utils.get_rgi_intersects_dir()

        assert os.path.isdir(rgi)
        assert os.path.exists(os.path.join(rgi,
                                           'Intersects_OGGM_Manifest.txt'))

        # Make a fake RGI file
        rgi_dir = os.path.join(self.dldir, 'rgi60')
        utils.mkdir(rgi_dir)
        make_fake_zipdir(os.path.join(rgi_dir, 'RGI_V6_Intersects'),
                         fakefile='Intersects_OGGM_Manifest.txt')
        rgi_f = make_fake_zipdir(rgi_dir)

        def down_check(url, cache_name=None, reset=False):
            expected = ('https://www.dropbox.com/s/vawryxl8lkzxowu/' +
                        'RGI_V6_Intersects.zip?dl=1')
            self.assertEqual(url, expected)
            return rgi_f

        with FakeDownloadManager('_progress_urlretrieve', down_check):
            rgi = utils.get_rgi_intersects_dir(version='6')

        assert os.path.isdir(rgi)
        assert os.path.exists(os.path.join(rgi,
                                           'Intersects_OGGM_Manifest.txt'))
Example No. 34
def get_test_dir():

    global _TEST_DIR

    if _TEST_DIR is None:
        s = get_git_ident()
        s += ''.join([str(k) + str(v) for k, v in get_sys_info()])
        s += ''.join([str(k) + str(v) for k, v in get_env_info()])
        s = hashlib.md5(s.encode()).hexdigest()
        out = os.path.join(cfg.PATHS['test_dir'], s)
        if 'PYTEST_XDIST_WORKER' in os.environ:
            out = os.path.join(out, os.environ.get('PYTEST_XDIST_WORKER'))
        mkdir(out)
        _TEST_DIR = out

        # If the ident is new, remove all other dirs to spare space
        for d in os.listdir(cfg.PATHS['test_dir']):
            if d and d != s:
                shutil.rmtree(os.path.join(cfg.PATHS['test_dir'], d))

    return _TEST_DIR
Example No. 35
def run_benchmark(rgi_version=None, rgi_reg=None, border=None,
                  output_folder='', working_dir='', is_test=False,
                  test_rgidf=None, test_intersects_file=None,
                  test_topofile=None, test_crudir=None):
    """Does the actual job.

    Parameters
    ----------
    rgi_version : str
        the RGI version to use (defaults to cfg.PARAMS)
    rgi_reg : str
        the RGI region to process
    border : int
        the number of pixels at the maps border
    output_folder : str
        path to the output folder (where to put the preprocessed tar files)
    working_dir : str
        path to the OGGM working directory
    is_test : bool
        to test on a couple of glaciers only!
    test_rgidf : shapefile
        for testing purposes only
    test_intersects_file : shapefile
        for testing purposes only
    test_topofile : str
        for testing purposes only
    test_crudir : str
        for testing purposes only
    """

    # TODO: temporarily silence Fiona deprecation warnings
    import warnings
    warnings.filterwarnings("ignore", category=DeprecationWarning)

    # Module logger
    log = logging.getLogger(__name__)

    # Initialize OGGM and set up the run parameters
    cfg.initialize(logging_level='WORKFLOW')

    # Local paths
    utils.mkdir(working_dir)
    cfg.PATHS['working_dir'] = working_dir

    # Use multiprocessing?
    cfg.PARAMS['use_multiprocessing'] = True

    # How many grid points around the glacier?
    # Make it large if you expect your glaciers to grow large
    cfg.PARAMS['border'] = border

    # Set to True for operational runs
    cfg.PARAMS['continue_on_error'] = True

    # For statistics
    odf = pd.DataFrame()

    if rgi_version is None:
        rgi_version = cfg.PARAMS['rgi_version']
    base_dir = os.path.join(output_folder)

    # Add a package version file
    utils.mkdir(base_dir)
    opath = os.path.join(base_dir, 'package_versions.txt')
    with open(opath, 'w') as vfile:
        vfile.write(utils.show_versions(logger=log))

    # Read RGI
    start = time.time()
    if test_rgidf is None:
        # Get the RGI file
        rgidf = gpd.read_file(utils.get_rgi_region_file(rgi_reg,
                                                        version=rgi_version))
        # We use intersects
        rgif = utils.get_rgi_intersects_region_file(rgi_reg,
                                                    version=rgi_version)
        cfg.set_intersects_db(rgif)
    else:
        rgidf = test_rgidf
        cfg.set_intersects_db(test_intersects_file)

    if is_test:
        # Just for fun
        rgidf = rgidf.sample(2)
    _add_time_to_df(odf, 'Read RGI', time.time()-start)

    # Sort for more efficient parallel computing
    rgidf = rgidf.sort_values('Area', ascending=False)

    log.workflow('Starting prepro run for RGI reg: {} '
                 'and border: {}'.format(rgi_reg, border))
    log.workflow('Number of glaciers: {}'.format(len(rgidf)))

    # Input
    if test_topofile:
        cfg.PATHS['dem_file'] = test_topofile

    # Initialize working directories
    start = time.time()
    gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
    _add_time_to_df(odf, 'init_glacier_regions', time.time()-start)

    # Pre-download other files just in case
    if test_crudir is None:
        _ = utils.get_cru_file(var='tmp')
        _ = utils.get_cru_file(var='pre')
    else:
        cfg.PATHS['cru_dir'] = test_crudir

    # Tasks
    task_list = [
        tasks.process_cru_data,
        tasks.glacier_masks,
        tasks.compute_centerlines,
        tasks.initialize_flowlines,
        tasks.compute_downstream_line,
        tasks.compute_downstream_bedshape,
        tasks.catchment_area,
        tasks.catchment_intersections,
        tasks.catchment_width_geom,
        tasks.catchment_width_correction,
        tasks.local_t_star,
        tasks.mu_star_calibration,
        tasks.prepare_for_inversion,
        tasks.mass_conservation_inversion,
        tasks.filter_inversion_output,
        tasks.init_present_time_glacier,
    ]
    for task in task_list:
        start = time.time()
        workflow.execute_entity_task(task, gdirs)
        _add_time_to_df(odf, task.__name__, time.time()-start)

    # Runs
    start = time.time()
    workflow.execute_entity_task(tasks.run_random_climate, gdirs,
                                 nyears=250, bias=0, seed=0,
                                 output_filesuffix='_tstar')
    _add_time_to_df(odf, 'run_random_climate_tstar_250', time.time()-start)

    start = time.time()
    workflow.execute_entity_task(tasks.run_random_climate, gdirs,
                                 nyears=250, y0=1995, seed=0,
                                 output_filesuffix='_commit')
    _add_time_to_df(odf, 'run_random_climate_commit_250', time.time()-start)

    # Compile results
    start = time.time()
    utils.compile_glacier_statistics(gdirs)
    _add_time_to_df(odf, 'compile_glacier_statistics', time.time()-start)

    start = time.time()
    utils.compile_climate_statistics(gdirs,
                                     add_climate_period=[1920, 1960, 2000])
    _add_time_to_df(odf, 'compile_climate_statistics', time.time()-start)

    start = time.time()
    utils.compile_run_output(gdirs, filesuffix='_tstar')
    _add_time_to_df(odf, 'compile_run_output_tstar', time.time()-start)

    start = time.time()
    utils.compile_run_output(gdirs, filesuffix='_commit')
    _add_time_to_df(odf, 'compile_run_output_commit', time.time()-start)

    # Log
    opath = os.path.join(base_dir, 'benchmarks_b{:03d}.csv'.format(border))
    odf.index.name = 'Task'
    odf.to_csv(opath)
    log.workflow('OGGM benchmarks are done!')
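_add_time_to_df() is not part of the snippet; a plausible minimal implementation consistent with how it is called above (the column name is an assumption):

def _add_time_to_df(df, index, t):
    # Record the wall-clock duration of one task in the stats DataFrame.
    df.loc[index, 'time_s'] = t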
Example No. 36
from oggm import tasks
from oggm.workflow import execute_entity_task
from oggm import graphics, utils

# Initialize OGGM
cfg.initialize()

# Local paths (where to write output and where to download input)
DATA_DIR = '/home/mowglie/disk/OGGM_INPUT'
WORKING_DIR = '/home/mowglie/disk/OGGM_RUNS/TEST_DEMS'
PLOTS_DIR = os.path.join(WORKING_DIR, 'plots')

cfg.PATHS['working_dir'] = WORKING_DIR
cfg.PATHS['topo_dir'] = os.path.join(DATA_DIR, 'topo')
cfg.PATHS['rgi_dir'] = os.path.join(DATA_DIR, 'rgi')
utils.mkdir(WORKING_DIR)
utils.mkdir(cfg.PATHS['topo_dir'])
utils.mkdir(cfg.PATHS['rgi_dir'])

# Use multiprocessing?
cfg.PARAMS['use_multiprocessing'] = False
cfg.PARAMS['border'] = 20
cfg.CONTINUE_ON_ERROR = False

# Read in the RGI file
rgisel = os.path.join(WORKING_DIR, 'rgi_selection.shp')
if not os.path.exists(rgisel):
    rgi_dir = utils.get_rgi_dir()
    regions = ['{:02d}'.format(int(p)) for p in range(1, 20)]
    files = [glob.glob(os.path.join(rgi_dir, '*', r + '_rgi50_*.shp'))[0]
             for r in regions]
    rgidf = []
Example No. 37
def prepare_divides(rgi_f):
    """Processes the rgi file and writes the intersects to OUTDIR"""

    rgi_reg = os.path.basename(rgi_f).split('_')[0]

    print('Start RGI reg ' + rgi_reg + ' ...')
    start_time = time.time()

    wgms, _ = get_wgms_files()
    f = glob(INDIR_DIVIDES + '*/*-' + rgi_reg + '.shp')[0]

    df = gpd.read_file(f)
    rdf = gpd.read_file(rgi_f)

    # Read glacier attrs
    key2 = {'0': 'Land-terminating',
            '1': 'Marine-terminating',
            '2': 'Lake-terminating',
            '3': 'Dry calving',
            '4': 'Regenerated',
            '5': 'Shelf-terminating',
            '9': 'Not assigned',
            }
    TerminusType = [key2[gtype[1]] for gtype in df.GlacType]
    IsTidewater = np.array([ttype in ['Marine-terminating', 'Lake-terminating']
                            for ttype in TerminusType])

    # Plots
    # dfref = df.loc[df.RGIId.isin(wgms.RGI50_ID)]
    # for gid in np.unique(dfref.GLIMSId):
    #     dfs = dfref.loc[dfref.GLIMSId == gid]
    #     dfs.plot(cmap='Set3', linestyle='-', linewidth=5);

    # Filter
    df = df.loc[~IsTidewater]
    df = df.loc[~df.RGIId.isin(wgms.RGI50_ID)]

    df['CenLon'] = pd.to_numeric(df['CenLon'])
    df['CenLat'] = pd.to_numeric(df['CenLat'])
    df['Area'] = pd.to_numeric(df['Area'])

    # Correct areas and other attributes
    n_gl_before = len(df)
    divided_ids = []
    for rid in np.unique(df.RGIId):
        sdf = df.loc[df.RGIId == rid].copy()
        srdf = rdf.loc[rdf.RGIId == rid]

        # Correct Area
        sdf.Area = np.array([float(a) for a in sdf.Area])

        geo_is_ok = []
        new_geo = []
        for g, a in zip(sdf.geometry, sdf.Area):
            if a < 0.01*1e6:
                geo_is_ok.append(False)
                continue
            try:
                new_geo.append(multi_to_poly(g))
                geo_is_ok.append(True)
            except Exception:
                geo_is_ok.append(False)

        sdf = sdf.loc[geo_is_ok]
        if len(sdf) < 2:
            # print(rid + ' is too small or has no valid divide...')
            df = df[df.RGIId != rid]
            continue

        area_km = sdf.Area * 1e-6

        cor_factor = srdf.Area.values / np.sum(area_km)
        if cor_factor > 1.2 or cor_factor < 0.8:
            # print(rid + ' is not OK...')
            df = df[df.RGIId != rid]
            continue
        area_km = cor_factor * area_km

        # Correct Centroid
        cenlon = [g.centroid.xy[0][0] for g in sdf.geometry]
        cenlat = [g.centroid.xy[1][0] for g in sdf.geometry]

        # ID
        new_id = [rid + '_d{:02}'.format(i + 1) for i in range(len(sdf))]

        # Write
        df.loc[sdf.index, 'Area'] = area_km
        df.loc[sdf.index, 'CenLon'] = cenlon
        df.loc[sdf.index, 'CenLat'] = cenlat
        df.loc[sdf.index, 'RGIId'] = new_id
        df.loc[sdf.index, 'geometry'] = new_geo

        divided_ids.append(rid)

    n_gl_after = len(df)

    # We make three data dirs: divides only, divides into rgi, divides + RGI
    bn = os.path.basename(rgi_f)
    bd = os.path.basename(os.path.dirname(rgi_f))
    base_dir_1 = OUTDIR_DIVIDES + '/RGIV5_DividesOnly/' + bd
    base_dir_2 = OUTDIR_DIVIDES + '/RGIV5_Corrected/' + bd
    base_dir_3 = OUTDIR_DIVIDES + '/RGIV5_OrigAndDivides/' + bd
    mkdir(base_dir_1, reset=True)
    mkdir(base_dir_2, reset=True)
    mkdir(base_dir_3, reset=True)

    df.to_file(os.path.join(base_dir_1, bn))

    dfa = pd.concat([df, rdf]).sort_values('RGIId')
    dfa.to_file(os.path.join(base_dir_3, bn))

    dfa = dfa.loc[~dfa.RGIId.isin(divided_ids)]
    dfa.to_file(os.path.join(base_dir_2, bn))

    print('RGI reg ' + rgi_reg +
          ' took {:.2f} seconds. We had to remove '
          '{} divides'.format(time.time() - start_time,
                              n_gl_before - n_gl_after))
    return
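
A minimal driver sketch (not part of the original source) for applying the
divide-processing function above to every RGI regional file. The name
process_divides is a placeholder for the function above, and the glob
pattern is an assumption about the local RGI file layout:

import multiprocessing as mp
from glob import glob

if __name__ == '__main__':
    # One shapefile per RGI region (placeholder pattern)
    rgi_files = sorted(glob('/path/to/rgi50/*/*.shp'))
    # Regions are independent, so they can be processed in parallel
    with mp.Pool(4) as pool:
        pool.map(process_divides, rgi_files)
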
Example n. 38
def compute_intersects(rgi_shp):
    """Processes the rgi file and writes the intersects to OUTDIR"""

    out_path = os.path.basename(rgi_shp)
    odir = os.path.basename(os.path.dirname(rgi_shp))
    odir = os.path.join(OUTDIR_INTERSECTS, odir)
    mkdir(odir)
    out_path = os.path.join(odir, 'intersects_' + out_path)

    print('Start ' + os.path.basename(rgi_shp) + ' ...')
    start_time = time.time()

    gdf = gpd.read_file(rgi_shp)

    # clean geometries like OGGM does
    ngeos = []
    keep = []
    for g in gdf.geometry:
        try:
            g = multi_to_poly(g)
            ngeos.append(g)
            keep.append(True)
        except Exception:
            keep.append(False)
    gdf = gdf.loc[keep]
    gdf['geometry'] = ngeos

    out_cols = ['RGIId_1', 'RGIId_2', 'geometry']
    out = gpd.GeoDataFrame(columns=out_cols)

    for _, major in gdf.iterrows():

        # Exterior only
        major_poly = major.geometry.exterior

        # sort by distance to the current glacier
        gdf['dis'] = haversine(major.CenLon, major.CenLat,
                               gdf.CenLon, gdf.CenLat)
        gdfs = gdf.sort_values(by='dis').iloc[1:]

        # Keep only the glaciers which intersect (within 200 km)
        gdfs = gdfs.loc[gdfs.dis < 200000]
        try:
            gdfs = gdfs.loc[gdfs.intersects(major_poly)]
        except Exception:
            gdfs = gdfs.loc[gdfs.intersects(major_poly.buffer(0))]

        for _, neighbor in gdfs.iterrows():

            # Skip pairs we have already stored (check values, not the index)
            if (neighbor.RGIId in out.RGIId_1.values or
                    neighbor.RGIId in out.RGIId_2.values):
                continue

            # Exterior only
            # Buffer is needed for numerical reasons
            neighbor_poly = neighbor.geometry.exterior.buffer(0.0001)

            # Go
            try:
                mult_intersect = major_poly.intersection(neighbor_poly)
            except Exception:
                continue

            if isinstance(mult_intersect, shpg.Point):
                continue
            if isinstance(mult_intersect, shpg.linestring.LineString):
                mult_intersect = [mult_intersect]
            if len(mult_intersect) == 0:
                continue
            mult_intersect = [m for m in mult_intersect if
                              not isinstance(m, shpg.Point)]
            if len(mult_intersect) == 0:
                continue
            mult_intersect = linemerge(mult_intersect)
            if isinstance(mult_intersect, shpg.linestring.LineString):
                mult_intersect = [mult_intersect]
            for line in mult_intersect:
                assert isinstance(line, shpg.linestring.LineString)
                line = gpd.GeoDataFrame([[major.RGIId, neighbor.RGIId, line]],
                                        columns=out_cols)
                out = out.append(line)

    out.crs = wgs84.srs
    out.to_file(out_path)

    print(os.path.basename(rgi_shp) +
          ' took {0:.2f} seconds'.format(time.time() - start_time))
    return
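
A minimal usage sketch (not from the original source); the RGI directory is
a placeholder path. Each call writes one intersects shapefile into
OUTDIR_INTERSECTS:

from glob import glob

for rgi_shp in sorted(glob('/path/to/rgi50/*/*.shp')):  # placeholder path
    compute_intersects(rgi_shp)
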
Example n. 39

if RUN_inAWS:
    # TODO: these paths are going to change with the new cluster
    # expanduser makes sure that '~' resolves to the home directory
    WORKING_DIR = os.path.expanduser('~/work_dir/')
    DATA_DIR = os.path.expanduser('~/input_data/')
    RGI_FILE = os.path.expanduser('~/Sub_region4/Sub_region4.shp')
    GLATHIDA_FILE = os.path.expanduser(
        '~/input_data/rgi_glathida_links_2014_RGIV54.csv')

cfg.PATHS['working_dir'] = WORKING_DIR
cfg.PATHS['topo_dir'] = os.path.join(DATA_DIR, 'topo')
cfg.PATHS['cru_dir'] = os.path.join(DATA_DIR, 'cru')
cfg.PATHS['glathida_rgi_links'] = GLATHIDA_FILE

# Create directories
utils.mkdir(cfg.PATHS['working_dir'])
utils.mkdir(cfg.PATHS['topo_dir'])
utils.mkdir(cfg.PATHS['cru_dir'])

# Use multiprocessing? Change this when running on AWS
cfg.PARAMS['use_multiprocessing'] = False
cfg.CONTINUE_ON_ERROR = False

# Other params
cfg.PARAMS['border'] = 80
cfg.PARAMS['temp_use_local_gradient'] = False
cfg.PARAMS['optimize_inversion_params'] = True
cfg.PARAMS['invert_with_sliding'] = False
cfg.PARAMS['bed_shape'] = 'parabolic'
cfg.PARAMS['use_compression'] = False
Example n. 40
log = logging.getLogger(__name__)

# Initialize OGGM and set up the default run parameters
cfg.initialize(logging_level='WORKFLOW')
rgi_version = '61'
rgi_region = '11'  # Region Central Europe

# Here we override some of the default parameters
# How many grid points around the glacier?
# Make it large if you expect your glaciers to grow large:
# here, 80 is more than enough
cfg.PARAMS['border'] = 80

# Local working directory (where OGGM will write its output)
WORKING_DIR = utils.gettempdir('OGGM_Rofental')
utils.mkdir(WORKING_DIR, reset=True)
cfg.PATHS['working_dir'] = WORKING_DIR

# RGI file
path = utils.get_rgi_region_file(rgi_region, version=rgi_version)
rgidf = gpd.read_file(path)

# Get the Rofental Basin file
path = utils.get_demo_file('rofental_hydrosheds.shp')
basin = gpd.read_file(path)

# Take all glaciers in the Rofental Basin
in_bas = [basin.geometry.contains(shpg.Point(x, y))[0] for
          (x, y) in zip(rgidf.CenLon, rgidf.CenLat)]
rgidf = rgidf.loc[in_bas]
Example n. 41
import oggm.cfg as cfg
from oggm import workflow
from oggm import tasks
from oggm.workflow import execute_entity_task
from oggm import graphics, utils
from oggm.core.models import flowline

# Initialize OGGM and set up the run parameters
# ---------------------------------------------

cfg.initialize()

# Local paths (where to write the OGGM run output)
WORKING_DIR = '/home/mowglie/disk/OGGM_Runs/EXAMPLE_GLACIERS'
PLOTS_DIR = os.path.join(WORKING_DIR, 'plots')
utils.mkdir(WORKING_DIR)
cfg.PATHS['working_dir'] = WORKING_DIR

# Use multiprocessing?
cfg.PARAMS['use_multiprocessing'] = True

# How many grid points around the glacier?
# Make it large if you expect your glaciers to grow large
cfg.PARAMS['border'] = 60

# This is the default in OGGM
cfg.PARAMS['prcp_scaling_factor'] = 2.5

# Set to True for operational runs
cfg.CONTINUE_ON_ERROR = False
cfg.PARAMS['auto_skip_task'] = False
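
A plausible continuation of this configuration (a sketch, not part of the
original source): read an RGI shapefile and initialize the glacier
directories. The shapefile path is a placeholder:

import geopandas as gpd

rgidf = gpd.read_file('/path/to/rgi_region.shp')  # placeholder path
gdirs = workflow.init_glacier_regions(rgidf)
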
Example n. 42
# Initialize OGGM
cfg.initialize()

# Local paths (where to write output and where to download input)
WORKING_DIR = '/work/ubuntu/run_alps'
DATA_DIR = '/work/ubuntu/oggm-data'

cfg.PATHS['working_dir'] = WORKING_DIR
cfg.PATHS['topo_dir'] = os.path.join(DATA_DIR, 'topo')
cfg.PATHS['cru_dir'] = os.path.join(DATA_DIR, 'cru')
cfg.PATHS['rgi_dir'] = os.path.join(DATA_DIR, 'rgi')

# Currently OGGM wants some directories to exist
# (maybe I'll change this but it can also catch errors in the user config)
utils.mkdir(cfg.PATHS['working_dir'])
utils.mkdir(cfg.PATHS['topo_dir'])
utils.mkdir(cfg.PATHS['cru_dir'])
utils.mkdir(cfg.PATHS['rgi_dir'])

# Use multiprocessing?
cfg.PARAMS['use_multiprocessing'] = True
cfg.CONTINUE_ON_ERROR = True

# Other params
cfg.PARAMS['border'] = 80
cfg.PARAMS['temp_use_local_gradient'] = False
cfg.PARAMS['invert_with_sliding'] = False
cfg.PARAMS['bed_shape'] = 'mixed'

# Download RGI files
Example n. 43
    def reset_dir(self):
        if os.path.exists(TEST_DIR):
            shutil.rmtree(TEST_DIR)
        utils.mkdir(cfg.PATHS['dl_cache_dir'])
        utils.mkdir(cfg.PATHS['working_dir'])
        utils.mkdir(cfg.PATHS['tmp_dir'])
Example n. 44
import os
import shutil
import numpy as np
from oggm.tests.funcs import init_hef, get_test_dir
from oggm import utils, tasks
from oggm.core import massbalance


testdir = os.path.join(get_test_dir(), 'benchmarks')
utils.mkdir(testdir, reset=True)
heights = np.linspace(2200, 3600, 120)
years = np.arange(151) + 1850


def teardown():
    if os.path.exists(testdir):
        shutil.rmtree(testdir)


def setup():
    global gdir
    gdir = init_hef(border=80)
    teardown()
    gdir = tasks.copy_to_basedir(gdir, base_dir=testdir, setup='all')


def time_PastMassBalance():

    mb_mod = massbalance.PastMassBalance(gdir, bias=0)
    for yr in years:
        mb_mod.get_annual_mb(heights, year=yr)
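
The time_* functions above follow the naming convention of asv (airspeed
velocity) benchmarks. A minimal sketch (an assumption, not part of the
original source) for timing one of them standalone, reusing the setup() and
teardown() helpers above:

if __name__ == '__main__':
    import time
    setup()
    t0 = time.time()
    time_PastMassBalance()
    print('time_PastMassBalance: {:.2f} s'.format(time.time() - t0))
    teardown()
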
Example n. 45
import os
import time
import shutil
import unittest

import salem
import numpy as np
import pandas as pd
from numpy.testing import assert_array_equal, assert_allclose

import oggm
from oggm import utils
from oggm import cfg
from oggm.tests import is_download

# Globals
TEST_DIR = os.path.join(cfg.PATHS['test_dir'], 'tmp_download')
utils.mkdir(TEST_DIR)

# In case some logging happens or so
cfg.PATHS['working_dir'] = cfg.PATHS['test_dir']

class TestFuncs(unittest.TestCase):

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_signchange(self):
        ts = pd.Series([-2., -1., 1., 2., 3], index=np.arange(5))
        sc = utils.signchange(ts)
Example n. 46
def run_prepro_levels(rgi_version=None, rgi_reg=None, border=None,
                      output_folder='', working_dir='', is_test=False,
                      demo=False, test_rgidf=None, test_intersects_file=None,
                      test_topofile=None, test_crudir=None):
    """Does the actual job.

    Parameters
    ----------
    rgi_version : str
        the RGI version to use (defaults to cfg.PARAMS)
    rgi_reg : str
        the RGI region to process
    border : int
        the number of pixels at the maps border
    output_folder : str
        path to the output folder (where to put the preprocessed tar files)
    working_dir : str
        path to the OGGM working directory
    is_test : bool
        to test on a couple of glaciers only!
    demo : bool
        to run the prepro for the list of demo glaciers
    test_rgidf : shapefile
        for testing purposes only
    test_intersects_file : shapefile
        for testing purposes only
    test_topofile : str
        for testing purposes only
    test_crudir : str
        for testing purposes only
    """

    # TODO: temporarily silence Fiona deprecation warnings
    import warnings
    warnings.filterwarnings("ignore", category=DeprecationWarning)

    # Module logger
    log = logging.getLogger(__name__)

    # Time
    start = time.time()

    # Initialize OGGM and set up the run parameters
    cfg.initialize(logging_level='WORKFLOW')

    # Local paths
    utils.mkdir(working_dir)
    cfg.PATHS['working_dir'] = working_dir

    # Use multiprocessing?
    cfg.PARAMS['use_multiprocessing'] = True

    # How many grid points around the glacier?
    # Make it large if you expect your glaciers to grow large
    cfg.PARAMS['border'] = border

    # Set to True for operational runs
    cfg.PARAMS['continue_on_error'] = True

    # For statistics
    climate_periods = [1920, 1960, 2000]

    if rgi_version is None:
        rgi_version = cfg.PARAMS['rgi_version']
    rgi_dir_name = 'RGI{}'.format(rgi_version)
    border_dir_name = 'b_{:03d}'.format(border)
    base_dir = os.path.join(output_folder, rgi_dir_name, border_dir_name)

    # Add a package version file
    utils.mkdir(base_dir)
    opath = os.path.join(base_dir, 'package_versions.txt')
    with open(opath, 'w') as vfile:
        vfile.write(utils.show_versions(logger=log))

    if demo:
        rgidf = utils.get_rgi_glacier_entities(cfg.DEMO_GLACIERS.index)
    elif test_rgidf is None:
        # Get the RGI file
        rgidf = gpd.read_file(utils.get_rgi_region_file(rgi_reg,
                                                        version=rgi_version))
        # We use intersects
        rgif = utils.get_rgi_intersects_region_file(rgi_reg,
                                                    version=rgi_version)
        cfg.set_intersects_db(rgif)
    else:
        rgidf = test_rgidf
        cfg.set_intersects_db(test_intersects_file)

    if is_test:
        # For testing: pick a small random subset of glaciers
        rgidf = rgidf.sample(4)

    # Sort for more efficient parallel computing
    rgidf = rgidf.sort_values('Area', ascending=False)

    log.workflow('Starting prepro run for RGI reg: {} '
                 'and border: {}'.format(rgi_reg, border))
    log.workflow('Number of glaciers: {}'.format(len(rgidf)))

    # Input
    if test_topofile:
        cfg.PATHS['dem_file'] = test_topofile

    # L1 - initialize working directories
    gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L1', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # L1 OK - compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L1')
    workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)

    # L2 - Tasks
    # Pre-download other files just in case
    if test_crudir is None:
        _ = utils.get_cru_file(var='tmp')
        _ = utils.get_cru_file(var='pre')
    else:
        cfg.PATHS['cru_dir'] = test_crudir

    workflow.execute_entity_task(tasks.process_cru_data, gdirs)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L2', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # L2 OK - compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L2')
    workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)

    # L3 - Tasks
    task_list = [
        tasks.glacier_masks,
        tasks.compute_centerlines,
        tasks.initialize_flowlines,
        tasks.compute_downstream_line,
        tasks.compute_downstream_bedshape,
        tasks.catchment_area,
        tasks.catchment_intersections,
        tasks.catchment_width_geom,
        tasks.catchment_width_correction,
        tasks.local_t_star,
        tasks.mu_star_calibration,
        tasks.prepare_for_inversion,
        tasks.mass_conservation_inversion,
        tasks.filter_inversion_output,
    ]
    for task in task_list:
        workflow.execute_entity_task(task, gdirs)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L3', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)
    opath = os.path.join(sum_dir, 'climate_statistics_{}.csv'.format(rgi_reg))
    utils.compile_climate_statistics(gdirs, add_climate_period=climate_periods,
                                     path=opath)

    # L3 OK - compress all in output directory
    l_base_dir = os.path.join(base_dir, 'L3')
    workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False,
                                 base_dir=l_base_dir)
    utils.base_dir_to_tar(l_base_dir)

    # L4 - Tasks
    workflow.execute_entity_task(tasks.init_present_time_glacier, gdirs)

    # Glacier stats
    sum_dir = os.path.join(base_dir, 'L4', 'summary')
    utils.mkdir(sum_dir)
    opath = os.path.join(sum_dir, 'glacier_statistics_{}.csv'.format(rgi_reg))
    utils.compile_glacier_statistics(gdirs, path=opath)

    # Copy mini data to new dir
    base_dir = os.path.join(base_dir, 'L4')
    mini_gdirs = workflow.execute_entity_task(tasks.copy_to_basedir, gdirs,
                                              base_dir=base_dir)

    # L4 OK - compress all in output directory
    workflow.execute_entity_task(utils.gdir_to_tar, mini_gdirs, delete=True)
    utils.base_dir_to_tar(base_dir)

    # Log
    m, s = divmod(time.time() - start, 60)
    h, m = divmod(m, 60)
    log.workflow('OGGM prepro_levels is done! Time needed: '
                 '{:02d}:{:02d}:{:02d}'.format(int(h), int(m), int(s)))
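
A minimal sketch of how this entry point might be invoked (the folder paths
are placeholders, not part of the original source):

if __name__ == '__main__':
    run_prepro_levels(rgi_version='61', rgi_reg='11', border=80,
                      output_folder='/path/to/output',
                      working_dir='/path/to/workdir',
                      is_test=True)
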
Example n. 47
    def newplotfunc(gdirs, ax=None, smap=None, add_colorbar=True, title=None,
                    title_comment=None, horizontal_colorbar=False,
                    lonlat_contours_kwargs=None, cbar_ax=None, autosave=False,
                    add_scalebar=True, figsize=None, savefig=None,
                    savefig_kwargs=None,
                    **kwargs):

        dofig = False
        if ax is None:
            fig = plt.figure(figsize=figsize)
            ax = fig.add_subplot(111)
            dofig = True

        # Cast to list
        gdirs = utils.tolist(gdirs)

        if smap is None:
            mp = salem.Map(gdirs[0].grid, countries=False,
                           nx=gdirs[0].grid.nx)
        else:
            mp = smap

        if lonlat_contours_kwargs is not None:
            mp.set_lonlat_contours(**lonlat_contours_kwargs)

        if add_scalebar:
            mp.set_scale_bar()
        out = plotfunc(gdirs, ax=ax, smap=mp, **kwargs)

        if add_colorbar and 'cbar_label' in out:
            cbprim = out.get('cbar_primitive', mp)
            if cbar_ax:
                cb = cbprim.colorbarbase(cbar_ax)
            else:
                if horizontal_colorbar:
                    cb = cbprim.append_colorbar(ax, "bottom", size="5%",
                                                pad=0.4)
                else:
                    cb = cbprim.append_colorbar(ax, "right", size="5%",
                                                pad=0.2)
            cb.set_label(out['cbar_label'])

        if title is None:
            if 'title' not in out:
                # Make a default one
                title = ''
                if len(gdirs) == 1:
                    gdir = gdirs[0]
                    title = gdir.rgi_id
                    if gdir.name is not None and gdir.name != '':
                        title += ': ' + gdir.name
                out['title'] = title

            if title_comment is None:
                title_comment = out.get('title_comment', '')

            out['title'] += title_comment
            ax.set_title(out['title'])
        else:
            ax.set_title(title)

        if dofig:
            plt.tight_layout()

        if autosave:
            savefig = os.path.join(cfg.PATHS['working_dir'], 'plots')
            utils.mkdir(savefig)
            savefig = os.path.join(savefig, plotfunc.__name__ + '_' +
                                   gdirs[0].rgi_id + '.png')

        if savefig is not None:
            # savefig_kwargs defaults to None; guard before unpacking
            if savefig_kwargs is None:
                savefig_kwargs = {}
            plt.savefig(savefig, **savefig_kwargs)
            plt.close()
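
newplotfunc above is the inner wrapper of a plotting decorator; a minimal
sketch of the enclosing pattern (the name _plot_map and the reduced
signature are assumptions, not from the original source):

import functools

def _plot_map(plotfunc):
    # Wrap a plotting function with the shared map setup, colorbar,
    # title and figure-saving logic shown above
    @functools.wraps(plotfunc)
    def newplotfunc(gdirs, ax=None, smap=None, **kwargs):
        # ... shared setup/teardown as in the snippet above ...
        return plotfunc(gdirs, ax=ax, smap=smap, **kwargs)
    return newplotfunc
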