Example No. 1
import copy

from oggm.core.massbalance import MultipleFlowlineMassBalance, ConstantMassBalance
from oggm.core.flowline import FluxBasedModel


def get_first_guess_surface_h(data_logger):
    """Conduct a spinup run and return the resulting surface_h as first guess."""
    fl = data_logger.flowline_init

    if 'surface_h' in list(data_logger.spinup_options.keys()):
        mb_options = data_logger.spinup_options['surface_h']['mb_model']
        if mb_options['type'] == 'constant':
            yr_start_run = mb_options['years'][0]
            yr_end_run = mb_options['years'][1]
            halfsize = (yr_end_run - yr_start_run) / 2
            mb_spinup = MultipleFlowlineMassBalance(
                data_logger.gdir,
                fls=[fl],
                mb_model_class=ConstantMassBalance,
                filename='climate_historical',
                input_filesuffix='',
                y0=yr_start_run + halfsize,
                halfsize=halfsize)
            mb_spinup.temp_bias = mb_options['t_bias']

            model = FluxBasedModel(copy.deepcopy([fl]),
                                   mb_spinup,
                                   y0=yr_start_run)
            model.run_until(yr_end_run)

            return model.fls[0].surface_h
        else:
            raise NotImplementedError(
                f'mb type {mb_options["type"]} not implemented!')

    else:
        raise NotImplementedError(
            'The provided spinup option is not implemented!')
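
For orientation, here is a minimal, hedged sketch of the spinup options this helper expects; the data_logger object and the exact option layout are assumptions based only on the keys read above.

import numpy as np

# hypothetical option layout mirroring the keys read by get_first_guess_surface_h
spinup_options = {
    'surface_h': {
        'mb_model': {
            'type': 'constant',                # only 'constant' is implemented above
            'years': np.array([1980, 2000]),   # start and end year of the spinup run
            't_bias': -2.0,                    # temperature bias applied during spinup
        }
    }
}

# data_logger is assumed to be prepared elsewhere and to expose
# .gdir, .flowline_init and .spinup_options:
# data_logger.spinup_options = spinup_options
# first_guess_sfc_h = get_first_guess_surface_h(data_logger)
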
Example No. 2
            # if the observation record extends beyond the rgi_date, create a new
            # model that can be run until the end of the observation record
            if refmb.index[-1] > years[-1]:
                mod.run_until(t_0)
                tasks.run_from_climate_data(gdir, ys=t_0, ye=refmb.index[-1],
                                            init_model_fls=copy.deepcopy(mod.fls),
                                            output_filesuffix='_until_refmb', bias=bias)
                mod = FileModel(gdir.get_filepath('model_run', filesuffix='_until_refmb'))

            # get mass balance from volume difference
            df.loc[:-1, 'OGGM_dv'] = mod.volume_m3_ts().diff() * cfg.PARAMS['ice_density'] / mod.area_m2_ts()
            df = df.shift(-1)

            for yr in mod.volume_km3_ts().index:
                mod.run_until(yr)
                mb = MultipleFlowlineMassBalance(gdir, fls=copy.deepcopy(mod.fls),
                                                 mb_model_class=PastMassBalance, bias=bias)
                df.loc[yr, 'OGGM_mb'] = mb.get_specific_mb(year=[mod.yr])

        df.loc[:, 'WGMS'] = refmb.ANNUAL_BALANCE
        df.index = df.index.astype(int)

        # difference between Mass Balance and volume delta
        rmse_d = np.sqrt(((df.OGGM_mb-df.OGGM_dv)**2).mean())
        max_d = (df.OGGM_mb-df.OGGM_dv).abs().max()
        delta_diff.loc[gdir.rgi_id, 'region'] = REGION
        delta_diff.loc[gdir.rgi_id, 'rmse'] = rmse_d
        delta_diff.loc[gdir.rgi_id, 'max_diff'] = max_d
        delta_diff.loc[gdir.rgi_id, 'temp_bias'] = temp_bias

        # difference between modelled and observed mass balance
        df = df.dropna(subset=['WGMS'])
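
The "mass balance from volume difference" step above is just the conversion of a yearly volume change into a specific balance. A self-contained sketch with made-up numbers and an assumed ice density of 900 kg m-3 (the value the code reads from cfg.PARAMS['ice_density']):

import numpy as np

ice_density = 900.0                               # kg m-3, assumed here
volume_m3 = np.array([1.00e9, 0.98e9, 0.97e9])    # yearly glacier volume (made-up)
area_m2 = np.array([9.0e6, 8.9e6, 8.8e6])         # yearly glacier area (made-up)

# specific mass balance in kg m-2 yr-1 (== mm w.e. yr-1), one value per year;
# this mirrors the .diff() / .shift(-1) pattern used above
mb_from_dv = np.diff(volume_m3) * ice_density / area_m2[1:]
print(mb_from_dv)
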
Example No. 3
                            bias=bias)
                        mod = FileModel(
                            gdir.get_filepath('model_run',
                                              filesuffix='_until_refmb'))

                # get modelled mass balance from volume difference
                df.loc[:-1, 'OGGM_dv'] = (mod.volume_m3_ts().diff() *
                                          cfg.PARAMS['ice_density'] /
                                          mod.area_m2_ts())
                df = df.shift(-1)

                # get mass balance from MassBalanceModel
                for yr in mod.volume_km3_ts().index:
                    mod.run_until(yr)
                    mb = MultipleFlowlineMassBalance(
                        gdir,
                        fls=deepcopy(mod.fls),
                        mb_model_class=PastMassBalance,
                        bias=bias,
                        check_calib_params=False)

                    df.loc[yr, 'OGGM_mb'] = mb.get_specific_mb(year=[mod.yr])

                # set WGMS data
                df.loc[:, 'WGMS'] = refmb.ANNUAL_BALANCE
                df.index = df.index.astype(int)
                df1 = df.dropna().drop('OGGM_dv', axis=1).transpose()
                df1.loc['OGGM_mb', 'kind'] = 'OGGM'
                df1.loc['WGMS', 'kind'] = 'WGMS'
                df1 = df1.rename(index={
                    'OGGM_mb': gdir.rgi_id,
                    'WGMS': gdir.rgi_id
                })
Example No. 4
def run_ensemble(allgdirs,
                 rgi_id,
                 ensemble,
                 tbiasdict,
                 allmeta,
                 storedir,
                 runsuffix='',
                 spinup_y0=1999):

    # default glena
    default_glena = 2.4e-24

    # loop over all combinations
    for nr, run in enumerate(ensemble):

        pdict = ast.literal_eval('{' + run + '}')
        cfg.PARAMS['glen_a'] = pdict['glena_factor'] * default_glena
        cfg.PARAMS['inversion_glen_a'] = pdict['glena_factor'] * default_glena
        mbbias = pdict['mbbias']
        cfg.PARAMS['prcp_scaling_factor'] = pdict['prcp_scaling_factor']

        log.info('Current parameter combination: %s' % str(run))
        log.info('This is combination %d out of %d.' % (nr + 1, len(ensemble)))

        # ok, we need the ref_glaciers here for calibration
        # they should already be initialized, so just recreate them from the directory
        ref_gdirs = [
            GlacierDirectory(refid)
            for refid in preprocessing.ADDITIONAL_REFERENCE_GLACIERS
        ]

        # do the mass balance calibration
        compute_ref_t_stars(ref_gdirs + allgdirs)
        task_list = [
            tasks.local_t_star, tasks.mu_star_calibration,
            tasks.prepare_for_inversion, tasks.mass_conservation_inversion,
            tasks.filter_inversion_output, tasks.init_present_time_glacier
        ]

        for task in task_list:
            execute_entity_task(task, allgdirs)

        # check for glaciers to merge:
        gdirs_merged = []
        gdirs2sim = allgdirs.copy()
        for gid in allmeta.index:
            merg = merge_pair_dict(gid)
            if merg is not None:
                # main and tributary glacier
                gd2merge = [
                    gd for gd in allgdirs if gd.rgi_id in [gid] + merg[0]
                ]

                # actual merge task
                log.warning('DeprecationWarning: If downloadlink is updated ' +
                            'to gdirs_v1.2, remove filename kwarg')
                gdir_merged = merge_glacier_tasks(gd2merge,
                                                  gid,
                                                  buffer=merg[1],
                                                  filename='climate_monthly')

                # remove the entity glaciers from the simulation list
                gdirs2sim = [
                    gd for gd in gdirs2sim if gd.rgi_id not in [gid] + merg[0]
                ]

                gdirs_merged.append(gdir_merged)

        # add merged glaciers to the left over entity glaciers
        gdirs2sim += gdirs_merged

        # now select only the one glacier of interest
        gdir = [gd for gd in gdirs2sim if gd.rgi_id == rgi_id][0]
        rgi_id0 = rgi_id.split('_')[0]
        meta = allmeta.loc[rgi_id0].copy()

        # do the actual simulations

        # spinup
        fls = gdir.read_pickle('model_flowlines')
        tbias = tbiasdict[run]
        mb = MultipleFlowlineMassBalance(gdir,
                                         fls=fls,
                                         mb_model_class=ConstantMassBalance,
                                         filename='climate_monthly',
                                         y0=spinup_y0,
                                         bias=mbbias)
        minimize_dl(tbias,
                    mb,
                    fls,
                    None,
                    None,
                    gdir,
                    False,
                    runsuffix='_{:02d}'.format(nr))

        # histalp
        # --------- GET SPINUP STATE ---------------
        tmp_mod = FileModel(
            gdir.get_filepath('model_run',
                              filesuffix='_spinup_{:02d}'.format(nr)))

        tmp_mod.run_until(tmp_mod.last_yr)

        # --------- HIST IT DOWN ---------------
        histrunsuffix = '_histalp{}_{:02d}'.format(runsuffix, nr)

        # now actual simulation
        run_from_climate_data(gdir,
                              ys=meta['first'],
                              ye=2014,
                              init_model_fls=tmp_mod.fls,
                              output_filesuffix=histrunsuffix,
                              climate_filename='climate_monthly',
                              bias=mbbias)

        # save the calibration parameter to the climate info file
        out = gdir.get_climate_info()
        out['ensemble_calibration'] = pdict
        gdir.write_json(out, 'climate_info')

        # copy stuff to storage
        basedir = os.path.join(storedir, rgi_id)
        ensdir = os.path.join(basedir, '{:02d}'.format(nr))
        mkdir(ensdir, reset=True)

        deep_path = os.path.join(ensdir, rgi_id[:8], rgi_id[:11], rgi_id)

        # copy whole GDir
        copy_to_basedir(gdir, base_dir=ensdir, setup='run')

        # copy run results
        fn1 = 'model_diagnostics_spinup_{:02d}.nc'.format(nr)
        shutil.copyfile(
            gdir.get_filepath('model_diagnostics',
                              filesuffix='_spinup_{:02d}'.format(nr)),
            os.path.join(deep_path, fn1))

        fn2 = 'model_diagnostics{}.nc'.format(histrunsuffix)
        shutil.copyfile(
            gdir.get_filepath('model_diagnostics', filesuffix=histrunsuffix),
            os.path.join(deep_path, fn2))

        fn3 = 'model_run_spinup_{:02d}.nc'.format(nr)
        shutil.copyfile(
            gdir.get_filepath('model_run',
                              filesuffix='_spinup_{:02d}'.format(nr)),
            os.path.join(deep_path, fn3))

        fn4 = 'model_run{}.nc'.format(histrunsuffix)
        shutil.copyfile(
            gdir.get_filepath('model_run', filesuffix=histrunsuffix),
            os.path.join(deep_path, fn4))

        log.warning('DeprecationWarning: If downloadlink is updated to ' +
                    'gdirs_v1.2 remove this copyfile:')
        # copy the (old) climate_monthly files to the storage directory as well
        for fn in os.listdir(gdir.dir):
            if 'climate_monthly' in fn:
                shutil.copyfile(os.path.join(gdir.dir, fn),
                                os.path.join(deep_path, fn))
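
Each entry of the ensemble above is parsed with ast.literal_eval('{' + run + '}'), so a run string is expected to be the body of a Python dict literal carrying the three calibration parameters used in the loop. A small sketch with made-up values:

import ast

# hypothetical ensemble member; the keys are the ones read in run_ensemble
run = "'glena_factor': 1.5, 'mbbias': -200, 'prcp_scaling_factor': 1.75"
pdict = ast.literal_eval('{' + run + '}')
print(pdict['glena_factor'], pdict['mbbias'], pdict['prcp_scaling_factor'])
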
Example No. 5
def t_star_from_refmb(gdir, mbdf=None, glacierwide=None):
    """Computes the ref t* for the glacier, given a series of MB measurements.

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
    mbdf : a pd.Series containing the observed MB data indexed by year
        if None, read automatically from the reference data
    glacierwide : bool
        whether to use the fast, glacier-wide search method; if None, taken
        from cfg.PARAMS['tstar_search_glacierwide']

    Returns
    -------
    A dict with keys 't_star', 'bias', 'avg_mb_per_mu' and 'avg_ref_mb'
    """

    from oggm.core.massbalance import MultipleFlowlineMassBalance

    if glacierwide is None:
        glacierwide = cfg.PARAMS['tstar_search_glacierwide']

    # Be sure we have no marine terminating glacier
    assert not gdir.is_tidewater

    # Reference time series
    if mbdf is None:
        mbdf = gdir.get_ref_mb_data()['ANNUAL_BALANCE']

    # which years to look at
    ref_years = mbdf.index.values

    # Average observed mass-balance
    ref_mb = np.mean(mbdf)

    # Compute one mu candidate per year and the associated statistics
    # Only get the years where we consider looking for tstar
    y0, y1 = cfg.PARAMS['tstar_search_window']
    ci = gdir.read_json('climate_info')
    y0 = y0 or ci['baseline_hydro_yr_0']
    y1 = y1 or ci['baseline_hydro_yr_1']
    years = np.arange(y0, y1 + 1)

    ny = len(years)
    mu_hp = int(cfg.PARAMS['mu_star_halfperiod'])
    mb_per_mu = pd.Series(index=years)

    if glacierwide:
        # The old (but fast) method to find t*
        _, temp, prcp = mb_yearly_climate_on_glacier(gdir, year_range=[y0, y1])

        # which years to look at
        selind = np.searchsorted(years, mbdf.index)
        sel_temp = temp[selind]
        sel_prcp = prcp[selind]
        sel_temp = np.mean(sel_temp)
        sel_prcp = np.mean(sel_prcp)

        for i, y in enumerate(years):

            # Ignore begin and end
            if ((i - mu_hp) < 0) or ((i + mu_hp) >= ny):
                continue

            # Compute the mu candidate
            t_avg = np.mean(temp[i - mu_hp:i + mu_hp + 1])
            if t_avg < 1e-3:  # if too cold no melt possible
                continue
            mu = np.mean(prcp[i - mu_hp:i + mu_hp + 1]) / t_avg

            # Apply it
            mb_per_mu[y] = np.mean(sel_prcp - mu * sel_temp)

    else:
        # The new (but slow) method to find t*
        # Compute mu for each 31-yr climatological period
        fls = gdir.read_pickle('inversion_flowlines')
        for i, y in enumerate(years):
            # Ignore begin and end
            if ((i - mu_hp) < 0) or ((i + mu_hp) >= ny):
                continue
            # Calibrate the mu for this year
            for fl in fls:
                fl.mu_star_is_valid = False
            try:
                # TODO: this is slow and can be highly optimised
                # it reads the same data over and over again
                _recursive_mu_star_calibration(gdir, fls, y, first_call=True)
                # Compute the MB with it
                mb_mod = MultipleFlowlineMassBalance(gdir,
                                                     fls,
                                                     bias=0,
                                                     check_calib_params=False)
                mb_ts = mb_mod.get_specific_mb(fls=fls, year=ref_years)
                mb_per_mu[y] = np.mean(mb_ts)
            except MassBalanceCalibrationError:
                pass

    # Diff to reference
    diff = (mb_per_mu - ref_mb).dropna()

    if len(diff) == 0:
        raise MassBalanceCalibrationError('No single valid mu candidate for '
                                          'this glacier!')

    # Here we used to keep all possible mu* in order to later select
    # them based on some distance search algorithms.
    # (revision 81bc0923eab6301306184d26462f932b72b84117)
    #
    # As of Jul 2018, we will now stop this nonsense:
    # out of all mu*, let's just pick the one with the smallest bias.
    # It doesn't make much sense, but the same is true for other methods
    # as well -> this is how Ben used to do it, and he is clever
    # Another way would be to pick the closest to today or something
    amin = np.abs(diff).idxmin()

    # Write
    d = gdir.read_json('climate_info')
    d['t_star'] = amin
    d['bias'] = diff[amin]
    gdir.write_json(d, 'climate_info')

    return {
        't_star': amin,
        'bias': diff[amin],
        'avg_mb_per_mu': mb_per_mu,
        'avg_ref_mb': ref_mb
    }
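
A minimal usage sketch, assuming gdir is an already prepared oggm.GlacierDirectory with processed climate data and reference mass-balance observations:

# gdir: assumed to be a prepared oggm.GlacierDirectory
res = t_star_from_refmb(gdir)                      # use the stored reference MB data
# res = t_star_from_refmb(gdir, mbdf=my_series)    # or pass your own pd.Series
print('t* =', res['t_star'], 'bias =', res['bias'])
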
Example No. 6
def minor_xval_statistics(gdirs):
    # initialize the pandas dataframes

    # to store mass balances of every glacier
    mbdf = pd.DataFrame([], index=np.arange(1850, 2050))

    # Cross-validation
    file = os.path.join(cfg.PATHS['working_dir'], 'crossval_tstars.csv')
    cvdf = pd.read_csv(file, index_col=0)

    # dataframe output
    xval = pd.DataFrame([],
                        columns=[
                            'RGIId', 'Name', 'tstar_bias', 'xval_bias',
                            'interp_bias', 'mustar', 'tstar', 'xval_mustar',
                            'xval_tstar', 'interp_mustar'
                        ])

    for gd in gdirs:
        t_cvdf = cvdf.loc[gd.rgi_id]
        # heights, widths = gd.get_inversion_flowline_hw()

        # Observed mass-balance
        refmb = gd.get_ref_mb_data().copy()

        # Mass-balance model with cross-validated parameters instead
        # use the cross validated flowline mustars:
        cv_fls = [col for col in t_cvdf.index if 'cv_mustar_flowline' in col]
        cv_fls.sort()
        mustarlist = t_cvdf[cv_fls].sort_index().dropna().tolist()

        mb_mod = MultipleFlowlineMassBalance(gd,
                                             mu_star=mustarlist,
                                             bias=t_cvdf.cv_bias,
                                             use_inversion_flowlines=True)
        refmb['OGGM_cv'] = mb_mod.get_specific_mb(year=refmb.index)
        # Compare their standard deviation
        std_ref = refmb.ANNUAL_BALANCE.std()
        rcor = np.corrcoef(refmb.OGGM_cv, refmb.ANNUAL_BALANCE)[0, 1]
        if std_ref == 0:
            # I think that such a thing happens with some geodetic values
            std_ref = refmb.OGGM_cv.std()
            rcor = 1
        # Store the scores
        cvdf.loc[gd.rgi_id, 'CV_MB_BIAS'] = (refmb.OGGM_cv.mean() -
                                             refmb.ANNUAL_BALANCE.mean())
        cvdf.loc[gd.rgi_id,
                 'CV_MB_SIGMA_BIAS'] = (refmb.OGGM_cv.std() / std_ref)
        cvdf.loc[gd.rgi_id, 'CV_MB_COR'] = rcor

        # Mass-balance model with interpolated mu_star
        mb_mod = MultipleFlowlineMassBalance(gd,
                                             mu_star=t_cvdf.interp_mustar,
                                             bias=t_cvdf.cv_bias,
                                             use_inversion_flowlines=True)
        refmb['OGGM_mu_interp'] = mb_mod.get_specific_mb(year=refmb.index)
        cvdf.loc[gd.rgi_id, 'INTERP_MB_BIAS'] = (refmb.OGGM_mu_interp.mean() -
                                                 refmb.ANNUAL_BALANCE.mean())

        # Mass-balance model with best guess tstar
        mu_fls = [
            col for col in t_cvdf.index
            if ('mustar_flowline' in col) and ('cv_' not in col)
        ]
        mu_fls.sort()
        mustarlist = t_cvdf[mu_fls].sort_index().dropna().tolist()
        mb_mod = MultipleFlowlineMassBalance(gd,
                                             mu_star=mustarlist,
                                             bias=t_cvdf.bias,
                                             use_inversion_flowlines=True)

        refmb['OGGM_tstar'] = mb_mod.get_specific_mb(year=refmb.index)
        cvdf.loc[gd.rgi_id, 'tstar_MB_BIAS'] = (refmb.OGGM_tstar.mean() -
                                                refmb.ANNUAL_BALANCE.mean())

        # Pandas DataFrame Output
        #
        # 1. statistics
        tbias = cvdf.loc[gd.rgi_id, 'tstar_MB_BIAS']
        xbias = cvdf.loc[gd.rgi_id, 'CV_MB_BIAS']
        ibias = cvdf.loc[gd.rgi_id, 'INTERP_MB_BIAS']
        xval = xval.append(
            {
                'Name': gd.name,
                'RGIId': gd.rgi_id,
                'tstar_bias': tbias,
                'xval_bias': xbias,
                'interp_bias': ibias,
                # TODO: how should the per-flowline mu values be handled here?
                'mustar': t_cvdf.mu_star_glacierwide,
                'tstar': t_cvdf.tstar,
                'xval_mustar': t_cvdf.cv_mu_star_glacierwide,
                'xval_tstar': t_cvdf.cv_t_star,
                'interp_mustar': t_cvdf.interp_mustar
            },
            ignore_index=True)

        #
        # 2. mass balance timeseries
        mbarray = np.dstack(
            (refmb.ANNUAL_BALANCE, refmb.OGGM_tstar, refmb.OGGM_cv)).squeeze()

        mbdf_add = pd.DataFrame(
            mbarray,
            columns=[[gd.rgi_id, gd.rgi_id, gd.rgi_id],
                     ['measured', 'calibrated', 'crossvalidated']],
            index=refmb.index)
        mbdf = pd.concat([mbdf, mbdf_add], axis=1)

    mbdf.columns = pd.MultiIndex.from_tuples(mbdf.columns)

    mbdf = mbdf.dropna(how='all')

    xval.index = xval.RGIId

    return xval, mbdf
Example No. 7
def evolve_glacier_and_create_measurements(gdir, used_mb_models, yr_start_run,
                                           yr_spinup, yr_end_run):
    """TODO
    """
    fls_spinup = gdir.read_pickle('model_flowlines', filesuffix='_spinup')
    yr_rgi = gdir.rgi_date
    # now start actual experiment run for the creation of measurements
    if used_mb_models == 'constant':
        halfsize_spinup = (yr_start_run - yr_spinup) / 2
        mb_spinup = MultipleFlowlineMassBalance(
            gdir,
            fls=fls_spinup,
            mb_model_class=ConstantMassBalance,
            filename='climate_historical',
            input_filesuffix='',
            y0=yr_spinup + halfsize_spinup,
            halfsize=halfsize_spinup)
        halfsize_run = (yr_end_run - yr_start_run) / 2
        mb_run = MultipleFlowlineMassBalance(
            gdir,
            fls=fls_spinup,
            mb_model_class=ConstantMassBalance,
            filename='climate_historical',
            input_filesuffix='',
            y0=yr_start_run + halfsize_run,
            halfsize=halfsize_run)

        # save the used mass balance models for combine
        mb_models_combine = {
            'MB1': {
                'type': 'constant',
                'years': np.array([yr_spinup, yr_start_run])
            },
            'MB2': {
                'type': 'constant',
                'years': np.array([yr_start_run, yr_end_run])
            }
        }
        gdir.write_pickle(mb_models_combine,
                          'inversion_input',
                          filesuffix='_combine_mb_models')

    else:
        raise NotImplementedError(f'{used_mb_models}')

    # do spinup period before first measurement
    model = FluxBasedModel(copy.deepcopy(fls_spinup), mb_spinup, y0=yr_spinup)
    model.run_until_and_store(yr_start_run,
                              diag_path=gdir.get_filepath(
                                  'model_diagnostics',
                                  filesuffix='_combine_spinup'))

    # get measurements for dhdt
    dh_volume = [None, None]
    dh_area = [None, None]
    dh_volume[0] = model.volume_m3
    dh_area[0] = model.area_m2

    # switch to mb_run and run to rgi_date and save measurements and flowline
    model = FluxBasedModel(copy.deepcopy(model.fls), mb_run, y0=yr_start_run)
    model.run_until(yr_rgi)
    gdir.write_pickle(model.fls,
                      'model_flowlines',
                      filesuffix='_combine_true_init')
    rgi_date_area_km2 = model.area_km2
    rgi_date_volume_km3 = model.volume_km3
    rgi_date_us_myr = model.u_stag[0] * model._surf_vel_fac * SEC_IN_YEAR

    # now run to the end for dhdt
    model.run_until_and_store(yr_end_run,
                              diag_path=gdir.get_filepath(
                                  'model_diagnostics',
                                  filesuffix='_combine_end'))
    gdir.write_pickle(model.fls,
                      'model_flowlines',
                      filesuffix='_combine_true_end')
    dh_volume[1] = model.volume_m3
    dh_area[1] = model.area_m2

    # calculate dh
    dh_m = (dh_volume[1] - dh_volume[0]) / \
           ((dh_area[1] + dh_area[0]) / 2.)

    # save measurements in gdir
    all_measurements = {
        'dh:m': {
            '2000-2019': dh_m
        },
        'area:km2': {
            str(yr_rgi): rgi_date_area_km2
        },
        'volume:km3': {
            str(yr_rgi): rgi_date_volume_km3
        },
        'us:myr-1': {
            str(yr_rgi): rgi_date_us_myr
        },
    }
    gdir.write_pickle(all_measurements,
                      'inversion_input',
                      filesuffix='_combine_measurements')
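
The synthetic measurements written at the end can later be read back from the glacier directory; a short sketch, again assuming a prepared gdir:

# assuming gdir already went through evolve_glacier_and_create_measurements
measurements = gdir.read_pickle('inversion_input',
                                filesuffix='_combine_measurements')
print(measurements['dh:m'])        # elevation change over the dhdt period
print(measurements['area:km2'])    # area at the RGI date
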
Example No. 8
#for gdir in gdirs:
#    if gdir.rgi_id in rm_list:
#        gdirs.remove(gdir)

# define year range
years = np.arange(1903, 2020)

# create dataframe to store results
mb_result = pd.DataFrame()

# Flowline Mass Balance
from oggm.core.massbalance import MultipleFlowlineMassBalance, PastMassBalance
for gdir in gdirs:
    rgi_id = gdir.rgi_id
    mbmod = MultipleFlowlineMassBalance(gdir,
                                        use_inversion_flowlines=True,
                                        mb_model_class=PastMassBalance)
    mb_ts = mbmod.get_specific_mb(year=years)  # get mass balance
    # create dataframe of mb per year
    temp_df = pd.DataFrame({'mb': mb_ts}, index=years)

    # read geodetic mb for the glacier
    test = geodmb[geodmb['RGIId'] == rgi_id]

    # get mm w.e. per year
    mmwe5385 = test['dmwe_53_85'].loc[test.index[0]] * 1000 / 32
    mmwe8516 = test['dmwe_85_16'].loc[test.index[0]] * 1000 / 31

    # avg difference to the OGGM mb per period (note: OGGM uses hydrological years)
    davg5385 = np.average(temp_df['mb'].loc[1954:1986]) - mmwe5385
    davg8516 = np.average(temp_df['mb'].loc[1986:2017]) - mmwe8516
Example No. 9
def run_and_store_from_disk(rgi,
                            histalp_storage,
                            commit_storage,
                            y0=1999,
                            years=300,
                            seed=None,
                            unique_samples=False,
                            halfsize=15):

    for i in np.arange(999):
        # Local working directory (where OGGM will write its output)
        storage_dir = os.path.join(histalp_storage, rgi, '{:02d}'.format(i),
                                   rgi[:8], rgi[:11], rgi)
        new_dir = os.path.join(cfg.PATHS['working_dir'], 'per_glacier',
                               rgi[:8], rgi[:11], rgi)

        # make sure directory is empty:
        try:
            shutil.rmtree(new_dir)
        except FileNotFoundError:
            pass
        # if path does not exist, we handled all ensemble members:
        try:
            shutil.copytree(storage_dir, new_dir)
        except FileNotFoundError:
            log.info('processed {:02d} ensemble members'.format(i))
            break

        gdir = GlacierDirectory(rgi)

        pdict = gdir.get_climate_info()['ensemble_calibration']

        cfg.PARAMS['prcp_scaling_factor'] = pdict['prcp_scaling_factor']
        default_glena = 2.4e-24
        cfg.PARAMS['glen_a'] = pdict['glena_factor'] * default_glena
        cfg.PARAMS['inversion_glen_a'] = pdict['glena_factor'] * default_glena
        mbbias = pdict['mbbias']

        tmp_mod = FileModel(
            gdir.get_filepath('model_run',
                              filesuffix='_histalp_{:02d}'.format(i)))
        tmp_mod.run_until(tmp_mod.last_yr)

        mb = MultipleFlowlineMassBalance(gdir,
                                         mb_model_class=RandomMassBalance,
                                         filename='climate_monthly',
                                         bias=mbbias,
                                         y0=y0,
                                         seed=seed,
                                         unique_samples=unique_samples,
                                         halfsize=halfsize)

        robust_model_run(gdir,
                         output_filesuffix='commitment{:04d}_{:02d}'.format(
                             y0, i),
                         mb_model=mb,
                         ys=0,
                         ye=years,
                         init_model_fls=tmp_mod.fls)

        fn1 = 'model_diagnostics_commitment{:04d}_{:02d}.nc'.format(y0, i)
        shutil.copyfile(
            gdir.get_filepath('model_diagnostics',
                              filesuffix='commitment{:04d}_{:02d}'.format(
                                  y0, i)), os.path.join(commit_storage, fn1))

        fn4 = 'model_run_commitment{:04d}_{:02d}.nc'.format(y0, i)
        shutil.copyfile(
            gdir.get_filepath('model_run',
                              filesuffix='commitment{:04d}_{:02d}'.format(
                                  y0, i)), os.path.join(commit_storage, fn4))
Example No. 10
def create_spinup_glacier(gdir, rgi_id_to_name, yr_start_run, yr_end_run,
                          yr_spinup, used_mb_models):
    """TODO
    """
    # now create the spinup glacier from the consensus flowline, starting ice-free,
    # and try to match the length at the rgi_date for a 'realistic' experiment setting
    fl_consensus = gdir.read_pickle('model_flowlines',
                                    filesuffix='_consensus')[0]
    length_m_ref = fl_consensus.length_m

    fls_spinup = copy.deepcopy([fl_consensus])
    fls_spinup[0].thick = np.zeros(len(fls_spinup[0].thick))
    halfsize = (yr_start_run - yr_spinup) / 2
    yr_rgi = gdir.rgi_date

    mb_spinup = MultipleFlowlineMassBalance(gdir,
                                            fls=fls_spinup,
                                            mb_model_class=ConstantMassBalance,
                                            filename='climate_historical',
                                            input_filesuffix='',
                                            y0=yr_spinup + halfsize,
                                            halfsize=halfsize)

    mb_historical = MultipleFlowlineMassBalance(gdir,
                                                fls=fls_spinup,
                                                mb_model_class=PastMassBalance,
                                                filename='climate_historical',
                                                input_filesuffix='')

    def spinup_run(t_bias):
        # t_bias changes the glacier state after the spinup between iterations
        mb_spinup.temp_bias = t_bias
        # run the spinup
        model_spinup = FluxBasedModel(copy.deepcopy(fls_spinup),
                                      mb_spinup,
                                      y0=0)
        model_spinup.run_until_equilibrium(max_ite=1000)

        # Now conduct the actual model run to the rgi date
        model_historical = FluxBasedModel(model_spinup.fls,
                                          mb_historical,
                                          y0=yr_spinup)
        model_historical.run_until(yr_rgi)

        cost = (model_historical.length_m - length_m_ref) / length_m_ref * 100

        return cost

    glacier_name = rgi_id_to_name[gdir.rgi_id]
    if experiment_glaciers[glacier_name]['t_bias_spinup_limits'] is None:
        print(
            'returning the spinup_run function to search for the t_bias limits!'
        )
        return gdir, spinup_run
    else:
        t_bias_spinup_limits = \
            experiment_glaciers[glacier_name]['t_bias_spinup_limits']

    if experiment_glaciers[glacier_name]['t_bias_spinup'] is None:
        # search for it by giving back
        t_bias_spinup = brentq(spinup_run,
                               t_bias_spinup_limits[0],
                               t_bias_spinup_limits[1],
                               maxiter=20,
                               disp=False)
    else:
        t_bias_spinup = experiment_glaciers[glacier_name]['t_bias_spinup']

    print(
        f'{gdir.name} spinup tbias: {t_bias_spinup} with length mismatch at rgi_date:'
        f' {spinup_run(t_bias_spinup)} %')

    mb_spinup.temp_bias = t_bias_spinup
    model_spinup = FluxBasedModel(copy.deepcopy(fls_spinup), mb_spinup, y0=0)
    model_spinup.run_until_equilibrium(max_ite=1000)

    gdir.write_pickle(model_spinup.fls,
                      'model_flowlines',
                      filesuffix='_spinup')
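
The t_bias search above is a one-dimensional root find on the length mismatch returned by spinup_run. A generic, self-contained sketch of the same pattern with scipy's brentq, using a toy cost function instead of an actual spinup:

from scipy.optimize import brentq

def toy_cost(t_bias):
    # stand-in for spinup_run: relative length mismatch in percent,
    # decreasing as the spinup climate gets warmer (more positive t_bias)
    return 25.0 - 12.0 * t_bias

# the bracket must contain a sign change, like t_bias_spinup_limits above
t_bias = brentq(toy_cost, -5.0, 5.0, maxiter=20, disp=False)
print(t_bias, toy_cost(t_bias))    # cost ~ 0 at the solution
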
Example No. 11
tstar_url = 'https://cluster.klima.uni-bremen.de/~oggm/ref_mb_params/oggm_v1.4/RGIV62/CRU/centerlines/qc3/pcp2.5'

workflow.download_ref_tstars(base_url=tstar_url)

# run climate related entity tasks
workflow.climate_tasks(gdirs)  # Downloads some files on the first time!

# remove glacier that caused error, setting rgi IDs is manual
#for gdir in gdirs:
#    if gdir.rgi_id == 'RGI60-05.01510':
#        gdirs.remove(gdir)

# Flowline Mass Balance
from oggm.core.massbalance import MultipleFlowlineMassBalance
for gdir in gdirs:
    mbmod = MultipleFlowlineMassBalance(gdir, use_inversion_flowlines=True)

# Ice thickness
list_tasks = [
    tasks.prepare_for_inversion,  # This is a preprocessing task
    tasks.mass_conservation_inversion,  # This does the actual job
    tasks.filter_inversion_output  # This smooths the thicknesses at the tongue a little
]
for task in list_tasks:
    workflow.execute_entity_task(task, gdirs)

# Convert the flowlines to a "glacier" for the ice dynamics module
workflow.execute_entity_task(tasks.init_present_time_glacier, gdirs)

###  GCM data simulation ###
Example No. 12
    # get tstars data to working dir
    tstar_url = 'https://cluster.klima.uni-bremen.de/~oggm/ref_mb_params/oggm_v1.4/RGIV62/CRU/centerlines/qc3/pcp2.5'
    workflow.download_ref_tstars(base_url=tstar_url)

    # run climate related entity tasks
    try:  # the try statement lets us skip glaciers that raise errors
        workflow.climate_tasks(
            gdirs)  # Downloads some files on the first time!
    except Exception:  # on error, record the ID and continue with the next glacier
        excludeIDs.append(rgiid)
        continue

    ### Mass balance ###
    from oggm.core.massbalance import MultipleFlowlineMassBalance
    mbmod = MultipleFlowlineMassBalance(gdir, use_inversion_flowlines=True)

    years = np.arange(1953, 2016)
    mb_ts = mbmod.get_specific_mb(year=years)
    #plt.plot(years, mb_ts); plt.ylabel('SMB (mm yr$^{-1}$)')

    ### Ice thickness ###
    list_tasks = [
        tasks.prepare_for_inversion,  # This is a preprocessing task
        tasks.mass_conservation_inversion,  # This does the actual job
        tasks.filter_inversion_output  # This smooths the thicknesses at the tongue a little
    ]
    for task in list_tasks:
        workflow.execute_entity_task(task, gdirs)
Example No. 13
def get_mean_temps_eq(rgi, histalp_storage, comit_storage, ensmembers):
    from oggm import cfg, utils, GlacierDirectory
    from oggm.core.massbalance import MultipleFlowlineMassBalance
    from oggm.core.flowline import FileModel
    import shutil


    # 1. get mean surface heights
    df85 = pd.DataFrame([])
    df99 = pd.DataFrame([])
    for i in range(ensmembers):
        fnc1 = os.path.join(comit_storage, rgi,
                            'model_run_commitment1885_{:02d}.nc'.format(i))
        fnc2 = os.path.join(comit_storage, rgi,
                            'model_run_commitment1999_{:02d}.nc'.format(i))
        tmpmod1 = FileModel(fnc1)
        tmpmod2 = FileModel(fnc2)
        for j in np.arange(270, 301):
            tmpmod1.run_until(j)
            df85.loc[:, '{}{}'.format(i, j)] = tmpmod1.fls[-1].surface_h
            tmpmod2.run_until(j)
            df99.loc[:, '{}{}'.format(i, j)] = tmpmod2.fls[-1].surface_h

    meanhgt99 = df99.mean(axis=1).values
    meanhgt85 = df85.mean(axis=1).values

    # 2. get the climate
    # Initialize OGGM
    cfg.initialize()
    wd = utils.gettempdir(reset=True)
    cfg.PATHS['working_dir'] = wd
    utils.mkdir(wd, reset=True)
    cfg.PARAMS['baseline_climate'] = 'HISTALP'
    # and set standard histalp values
    cfg.PARAMS['temp_melt'] = -1.75

    i = 0
    storage_dir = os.path.join(histalp_storage, rgi, '{:02d}'.format(i),
                               rgi[:8], rgi[:11], rgi)
    new_dir = os.path.join(cfg.PATHS['working_dir'], 'per_glacier',
                           rgi[:8], rgi[:11], rgi)
    shutil.copytree(storage_dir, new_dir)
    gdir = GlacierDirectory(rgi)
    mb = MultipleFlowlineMassBalance(gdir, filename='climate_monthly',
                                     check_calib_params=False)
    # need to do the above for every ensemble member if I consider PRECIP!
    # and set cfg.PARAMS['prcp_scaling_factor'] = pdict['prcp_scaling_factor']

    df99_2 = pd.DataFrame()
    df85_2 = pd.DataFrame()
    for i in np.arange(9, 12):
        for y in np.arange(1870, 1901):
            flyear = utils.date_to_floatyear(y, i)
            tmp = mb.flowline_mb_models[-1].get_monthly_climate(meanhgt85,
                                                                flyear)[0]
            df85_2.loc[y, i] = tmp.mean()
        for y in np.arange(1984, 2015):
            # recompute the float year for this (year, month) pair
            flyear = utils.date_to_floatyear(y, i)
            tmp = mb.flowline_mb_models[-1].get_monthly_climate(meanhgt99,
                                                                flyear)[0]
            df99_2.loc[y, i] = tmp.mean()

    t99 = df99_2.mean().mean()
    t85 = df85_2.mean().mean()
    return t85, t99
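
For reference, utils.date_to_floatyear converts a (year, month) pair into the float year expected by get_monthly_climate, roughly year + (month - 1) / 12. A one-line check:

from oggm import utils

flyear = utils.date_to_floatyear(1870, 9)
print(flyear)    # about 1870.67 for September 1870
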
Example No. 14
gdirs = workflow.init_glacier_regions(rgidf)

# Cross-validation
file = path.join(cfg.PATHS['working_dir'], 'ref_tstars.csv')
ref_df = pd.read_csv(file, index_col=0)
for i, gdir in enumerate(gdirs):

    print('Cross-validation iteration {} of {}'.format(i + 1, len(ref_df)))

    # Now recalibrate the model blindly
    tmp_ref_df = ref_df.loc[ref_df.index != gdir.rgi_id]
    tasks.local_t_star(gdir, ref_df=tmp_ref_df)
    tasks.mu_star_calibration(gdir)

    # Mass-balance model with cross-validated parameters instead
    mb_mod = MultipleFlowlineMassBalance(gdir, mb_model_class=PastMassBalance)

    # Mass-balance timeseries, observed and simulated
    refmb = gdir.get_ref_mb_data().copy()
    refmb['OGGM'] = mb_mod.get_specific_mb(year=refmb.index)

    # Compare their standard deviation
    std_ref = refmb.ANNUAL_BALANCE.std()
    rcor = np.corrcoef(refmb.OGGM, refmb.ANNUAL_BALANCE)[0, 1]
    if std_ref == 0:
        # I think that such a thing happens with some geodetic values
        std_ref = refmb.OGGM.std()
        rcor = 1

    # Store the scores
    ref_df.loc[gdir.rgi_id, 'CV_MB_BIAS'] = (refmb.OGGM.mean() -
                                             refmb.ANNUAL_BALANCE.mean())
Example No. 15
execute_entity_task(tasks.mu_star_calibration, gdirs)

# We store the associated params
mb_calib = gdirs[0].read_pickle('climate_info')['mb_calib_params']
with open(os.path.join(WORKING_DIR, 'mb_calib_params.json'), 'w') as fp:
    json.dump(mb_calib, fp)

# And also some statistics
utils.compile_glacier_statistics(gdirs)

# Tests: for all glaciers, the mass-balance around tstar and the
# bias with observation should be approx 0
for gd in gdirs:

    mb_mod = MultipleFlowlineMassBalance(gd,
                                         mb_model_class=ConstantMassBalance,
                                         use_inversion_flowlines=True,
                                         bias=0)  # bias=0 because of calib!
    mb = mb_mod.get_specific_mb()
    np.testing.assert_allclose(mb, 0, atol=5)  # atol for numerical errors

    mb_mod = MultipleFlowlineMassBalance(gd, mb_model_class=PastMassBalance,
                                         use_inversion_flowlines=True)

    refmb = gd.get_ref_mb_data().copy()
    refmb['OGGM'] = mb_mod.get_specific_mb(year=refmb.index)
    np.testing.assert_allclose(refmb.OGGM.mean(), refmb.ANNUAL_BALANCE.mean(),
                               atol=5)  # atol for numerical errors

# Log
log.info('Calibration is done!')
Example No. 16
execute_entity_task(tasks.mu_star_calibration, gdirs)

# We store the associated params
mb_calib = gdirs[0].read_pickle('climate_info')['mb_calib_params']
with open(os.path.join(WORKING_DIR, 'mb_calib_params.json'), 'w') as fp:
    json.dump(mb_calib, fp)

# And also some statistics
utils.compile_glacier_statistics(gdirs)

# Tests: for all glaciers, the mass-balance around tstar and the
# bias with observation should be approx 0
for gd in gdirs:

    mb_mod = MultipleFlowlineMassBalance(gd,
                                         mb_model_class=ConstantMassBalance,
                                         use_inversion_flowlines=True,
                                         bias=0)  # bias=0 because of calib!
    mb = mb_mod.get_specific_mb()
    np.testing.assert_allclose(mb, 0, atol=5)  # atol for numerical errors

    mb_mod = MultipleFlowlineMassBalance(gd,
                                         mb_model_class=PastMassBalance,
                                         use_inversion_flowlines=True)

    refmb = gd.get_ref_mb_data().copy()
    refmb['OGGM'] = mb_mod.get_specific_mb(year=refmb.index)
    np.testing.assert_allclose(refmb.OGGM.mean(),
                               refmb.ANNUAL_BALANCE.mean(),
                               atol=5)  # atol for numerical errors

# Log
Example No. 17
def quick_crossval_entity(gdir, full_ref_df=None):

    tmpdf = pd.DataFrame(
        [], columns=['std_oggm', 'std_ref', 'rmse', 'core', 'bias'])

    # the reference glaciers
    tmp_ref_df = full_ref_df.loc[full_ref_df.index != gdir.rgi_id]

    # before the cross-val store the info about "real" mustar
    ref_rdf = gdir.read_json('local_mustar')

    tasks.local_t_star(gdir, ref_df=tmp_ref_df)
    tasks.mu_star_calibration(gdir)

    # read crossvalidated values
    cv_rdf = gdir.read_json('local_mustar')

    # ----
    # --- MASS-BALANCE MODEL
    mb_mod = MultipleFlowlineMassBalance(gdir, use_inversion_flowlines=True)

    # Mass-balance timeseries, observed and simulated
    refmb = gdir.get_ref_mb_data().copy()
    refmb['OGGM'] = mb_mod.get_specific_mb(year=refmb.index)

    # store single glacier results
    bias = refmb.OGGM.mean() - refmb.ANNUAL_BALANCE.mean()
    rmse = np.sqrt(np.mean((refmb.OGGM - refmb.ANNUAL_BALANCE)**2))
    rcor = np.corrcoef(refmb.OGGM, refmb.ANNUAL_BALANCE)[0, 1]

    ref_std = refmb.ANNUAL_BALANCE.std()

    # unclear how to treat this best
    if ref_std == 0:
        ref_std = refmb.OGGM.std()
        rcor = 1

    tmpdf.loc[len(tmpdf.index)] = {
        'std_oggm': refmb.OGGM.std(),
        'std_ref': ref_std,
        'bias': bias,
        'rmse': rmse,
        'core': rcor
    }

    # and store mean values
    out = {
        'prcpsf': cfg.PARAMS['prcp_scaling_factor'],
        'tliq': cfg.PARAMS['temp_all_liq'],
        'tmelt': cfg.PARAMS['temp_melt'],
        'tgrad': cfg.PARAMS['temp_default_gradient'],
        'std_oggm': tmpdf.std_oggm.values[0],
        'std_ref': tmpdf.std_ref.values[0],
        'std_quot': np.nan,
        'bias': tmpdf['bias'].mean(),
        'rmse': tmpdf['rmse'].mean(),
        'core': tmpdf['core'].mean()
    }

    # combine the "real" mu_star and the cross-validated mu_star
    # flatten mu_star_per_flowline, as a list per flowline is awkward to deal with
    for i, fl in enumerate(cv_rdf['mu_star_per_flowline']):
        cv_rdf['mustar_flowline_{:03d}'.format(i + 1)] = fl
    for i, fl in enumerate(ref_rdf['mu_star_per_flowline']):
        ref_rdf['mustar_flowline_{:03d}'.format(i + 1)] = fl
    del cv_rdf['mu_star_per_flowline']
    del ref_rdf['mu_star_per_flowline']

    for col in cv_rdf.keys():
        if 'rgi_id' in col:
            continue
        ref_rdf['cv_' + col] = cv_rdf[col]

    return [out, ref_rdf]
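
Note on the rmse line above: the square has to sit inside the mean, sqrt(mean(d**2)); putting it outside, sqrt(mean(d)**2), just reproduces the absolute bias. A quick numpy check of the difference:

import numpy as np

d = np.array([100.0, -100.0, 50.0])     # toy residuals (modelled minus observed)
bias = d.mean()                         # ~16.7
abs_bias = np.sqrt(np.mean(d)**2)       # ~16.7 -> square outside the mean is just |bias|
rmse = np.sqrt(np.mean(d**2))           # ~86.6 -> the actual RMSE
print(bias, abs_bias, rmse)
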
Example No. 18
gdirs = workflow.init_glacier_directories(rids)

# Cross-validation
file = os.path.join(cfg.PATHS['working_dir'], 'ref_tstars.csv')
ref_df = pd.read_csv(file, index_col=0)
for i, gdir in enumerate(gdirs):

    print('Cross-validation iteration {} of {}'.format(i + 1, len(ref_df)))

    # Now recalibrate the model blindly
    tmp_ref_df = ref_df.loc[ref_df.index != gdir.rgi_id]
    tasks.local_t_star(gdir, ref_df=tmp_ref_df)
    tasks.mu_star_calibration(gdir)

    # Mass-balance model with cross-validated parameters instead
    mb_mod = MultipleFlowlineMassBalance(gdir, mb_model_class=PastMassBalance,
                                         use_inversion_flowlines=True)

    # Mass-balance timeseries, observed and simulated
    refmb = gdir.get_ref_mb_data().copy()
    refmb['OGGM'] = mb_mod.get_specific_mb(year=refmb.index)

    # Compare their standard deviation
    std_ref = refmb.ANNUAL_BALANCE.std()
    rcor = np.corrcoef(refmb.OGGM, refmb.ANNUAL_BALANCE)[0, 1]
    if std_ref == 0:
        # I think that such a thing happens with some geodetic values
        std_ref = refmb.OGGM.std()
        rcor = 1

    # Store the scores
    ref_df.loc[gdir.rgi_id, 'CV_MB_BIAS'] = (refmb.OGGM.mean() -
                                             refmb.ANNUAL_BALANCE.mean())
Example No. 19
def systematic_spinup(gdir, meta, mb_bias=None, y0=1999):

    # how long is the glacier at initialization
    fls = gdir.read_pickle('model_flowlines')
    # TODO maybe not use 2003 as fixed date, but rather ask for the RGI date
    #   this then needs to be considered in meta as well
    len2003 = fls[-1].length_m
    # how long shall we go? MINUS for positive length change!
    dl = -meta['dL2003']
    # mass balance model
    log.warning('DeprecationWarning: If downloadlink is updated to ' +
                'gdirs_v1.2 remove filename kwarg')
    mb = MultipleFlowlineMassBalance(gdir,
                                     fls=fls,
                                     mb_model_class=ConstantMassBalance,
                                     filename='climate_monthly',
                                     y0=y0,
                                     bias=mb_bias)

    # coarse first test values
    totest = np.arange(-8, 3.1)

    # dataframe for results
    rval = pd.DataFrame([], columns=['delta'], dtype=float)

    # linspace counter
    lsc = 0
    # linspace shift (half-width of the refinement window)
    lss = [0.5, 0.25, 0.125, 0.06, 0.02]

    while True:
        # don't test anything twice
        totest = totest[~np.isin(totest, rval.index)]
        for tb in totest:
            delta = minimize_dl(tb, mb, fls, dl, len2003, gdir, True)
            if delta == len2003**2:
                delta = np.nan

            rval.loc[tb, 'delta'] = delta
            if np.sqrt(delta) < fls[-1].dx_meter:
                break

        if np.sqrt(delta) < fls[-1].dx_meter:
            break

        if lsc == len(lss):
            log.info('SPINUP WARNING (%s): use best result so far!' %
                     gdir.rgi_id)
            break

        if rval['delta'].isna().all():
            log.info('SPINUP ERROR (%s): could not find working tbias!' %
                     gdir.rgi_id)
            return -999

        # no fit so far, get new tbias to test:
        # current minima
        cmin = rval['delta'].idxmin()

        totest = np.linspace(cmin - lss[lsc], cmin + lss[lsc], 5).round(2)
        lsc += 1

    tbias = rval.dropna().idxmin().iloc[0]
    delta = np.sqrt(rval.loc[tbias, 'delta'])

    log.info('(%s) delta = %.2f (flowline spacing = %.2f)' %
             (gdir.rgi_id, delta, fls[-1].dx_meter))

    # --------- SPIN IT UP FOR REAL ---------------
    minimize_dl(tbias, mb, fls, None, None, gdir, False)
    return tbias
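
The coarse-to-fine t_bias search in systematic_spinup can be illustrated with a toy objective: a coarse 1-degree sweep followed by successively narrower np.linspace windows around the current minimum (the lss list above). A self-contained sketch:

import numpy as np

def toy_delta(tb):
    # stand-in for minimize_dl: squared distance to an unknown optimum
    return (tb - (-2.34))**2

totest = np.arange(-8, 3.1)                 # coarse first sweep
lss = [0.5, 0.25, 0.125, 0.06, 0.02]        # successive refinement half-widths
best = None
for half in [None] + lss:
    if half is not None:
        totest = np.linspace(best - half, best + half, 5).round(2)
    vals = {tb: toy_delta(tb) for tb in totest}
    best = min(vals, key=vals.get)
print(best)    # close to -2.34
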