Example No. 1
def velocity_to_gdir(gdir, add_error=False):
    """Reproject the its_live files to the given glacier directory.

    Variables are added to the gridded_data nc file.

    Reprojecting velocities from one map projection to another is done by
    reprojecting the vector distances. In this process, absolute velocities
    might change as well, because map projections do not always preserve
    distances -> we scale them back to the original velocities, as per the
    ITS_LIVE documentation, which states that velocities are given in
    ground units, i.e. absolute velocities.

    We use bilinear interpolation to reproject the velocities to the local
    glacier map.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    add_error : bool
        also reproject and scale the error data
    """

    if not gdir.has_file('gridded_data'):
        raise InvalidWorkflowError('Please run `glacier_masks` before running '
                                   'this task')

    _reproject_and_scale(gdir, do_error=False)
    if add_error:
        _reproject_and_scale(gdir, do_error=True)
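
A minimal usage sketch, assuming ``gdirs`` is a list of already initialized glacier directories and that the task lives in OGGM's its_live shop module (an assumption based on the helpers it calls):

from oggm import workflow, tasks
from oggm.shop import its_live

# glacier_masks must have run before the velocity task:
workflow.execute_entity_task(tasks.glacier_masks, gdirs)
workflow.execute_entity_task(its_live.velocity_to_gdir, gdirs, add_error=True)
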
Example No. 2
def filter_inversion_output(gdir):
    """Filters the last few grid points after the physically-based inversion.

    For various reasons (but mostly: the equilibrium assumption), the last few
    grid points on a glacier flowline are often noisy and create unphysical
    depressions. Here we try to correct for that. It is not volume conserving,
    but area conserving.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    """

    if gdir.is_tidewater:
        # No need for filter in tidewater case
        return

    if not gdir.has_file('downstream_line'):
        raise InvalidWorkflowError('filter_inversion_output now needs a '
                                   'previous call to the '
                                   'compute_downstream_line and '
                                   'compute_downstream_bedshape tasks')

    dic_ds = gdir.read_pickle('downstream_line')
    bs = np.average(dic_ds['bedshapes'][:3])

    n = -5

    cls = gdir.read_pickle('inversion_output')
    cl = cls[-1]

    # First guess thickness based on width
    w = cl['width'][n:]
    s = w**3 * bs / 6
    h = 3 / 2 * s / w

    # Smoothing things out a bit
    hts = np.append(np.append(cl['thick'][n - 3:n], h), 0)
    h = utils.smooth1d(hts, 3)[n - 1:-1]

    # Recompute bedshape based on that
    bs = utils.clip_min(4 * h / w**2, cfg.PARAMS['mixed_min_shape'])

    # OK, done
    s = w**3 * bs / 6

    cl['thick'][n:] = 3 / 2 * s / w
    cl['volume'][n:] = s * cl['dx']
    cl['is_trapezoid'][n:] = False
    cl['is_rectangular'][n:] = False

    gdir.write_pickle(cls, 'inversion_output')

    # output the volume here - this simplifies code for some downstream funcs
    return np.sum([np.sum(cl['volume']) for cl in cls])
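
A usage sketch, assuming ``gdirs`` already went through the inversion workflow; the prerequisite downstream tasks are shown explicitly:

from oggm import workflow, tasks

workflow.execute_entity_task(tasks.compute_downstream_line, gdirs)
workflow.execute_entity_task(tasks.compute_downstream_bedshape, gdirs)
vols = workflow.execute_entity_task(tasks.filter_inversion_output, gdirs)
# Each entry of `vols` is the filtered total glacier volume in m^3.
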
Example No. 3
def _check_duplicates(rgidf=None):
    """Complain if the input has duplicates."""

    if rgidf is None:
        return
    # Check if dataframe or list of strs
    try:
        rgidf = rgidf.RGIId
    except AttributeError:
        rgidf = utils.tolist(rgidf)
    u, c = np.unique(rgidf, return_counts=True)
    if len(u) < len(rgidf):
        raise InvalidWorkflowError('Found duplicates in the list of '
                                   'RGI IDs: {}'.format(u[c > 1]))
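
The check boils down to ``np.unique`` with counts; a standalone illustration with made-up IDs:

import numpy as np

rgi_ids = ['RGI60-11.00897', 'RGI60-11.00897', 'RGI60-11.01450']
u, c = np.unique(rgi_ids, return_counts=True)
if len(u) < len(rgi_ids):
    print('Duplicates found:', u[c > 1])  # -> ['RGI60-11.00897']
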
Example No. 4
def execute_entity_task(task, gdirs, **kwargs):
    """Execute a task on gdirs.

    If you asked for multiprocessing, it will do it.

    If ``task`` has more arguments than ``gdir``, they have to be keyword
    arguments.

    Parameters
    ----------
    task : function
         the entity task to apply
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the glacier directories to process
    """

    if task.__dict__.get('is_global_task', False):
        raise InvalidWorkflowError('execute_entity_task cannot be used on '
                                   'global tasks.')

    # Should be iterable
    gdirs = utils.tolist(gdirs)
    ng = len(gdirs)
    if ng == 0:
        log.workflow('Called entity task %s on 0 glaciers. Returning...',
                     task.__name__)
        return

    log.workflow('Execute entity task %s on %d glaciers', task.__name__, ng)

    pc = _pickle_copier(task, kwargs)

    if _have_ogmpi:
        if ogmpi.OGGM_MPI_COMM is not None:
            return ogmpi.mpi_master_spin_tasks(pc, gdirs)

    if cfg.PARAMS['use_multiprocessing'] and ng > 1:
        mppool = init_mp_pool(cfg.CONFIG_MODIFIED)
        out = mppool.map(pc, gdirs, chunksize=1)
    else:
        if ng > 3:
            log.workflow(
                'WARNING: you are trying to run an entity task on '
                '%d glaciers with multiprocessing turned off. OGGM '
                'will run faster with multiprocessing turned on.', ng)
        out = [pc(gdir) for gdir in gdirs]

    return out
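
A usage sketch, assuming ``gdirs`` is a list of initialized glacier directories; extra task arguments beyond ``gdir`` would be passed as keyword arguments:

from oggm import cfg, workflow, tasks

cfg.PARAMS['use_multiprocessing'] = True  # process the glaciers in parallel
out = workflow.execute_entity_task(tasks.glacier_masks, gdirs)
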
Example No. 5
def _check_rgi_input(rgidf=None):
    """Complain if the input has duplicates."""

    if rgidf is None:
        return
    # Check if dataframe or list of strs
    try:
        rgi_ids = rgidf.RGIId
        # if dataframe we can also check for connectivity
        if 'Connect' in rgidf and np.any(rgidf['Connect'] == 2):
            log.workflow('WARNING! You have glaciers with connectivity level '
                         '2 in your list. OGGM does not provide pre-processed '
                         'directories for these.')
    except AttributeError:
        rgi_ids = utils.tolist(rgidf)
    u, c = np.unique(rgi_ids, return_counts=True)
    if len(u) < len(rgi_ids):
        raise InvalidWorkflowError('Found duplicates in the list of '
                                   'RGI IDs: {}'.format(u[c > 1]))
Example No. 6
def velocity_to_gdir(gdir, add_error=False):
    """Reproject the its_live files to the given glacier directory.

    The data source used is https://its-live.jpl.nasa.gov/#data
    Currently the only data downloaded is the 120m composite for both
    (u, v) and their uncertainty. The composite is computed from the
    1985 to 2018 average.

    Variables are added to the gridded_data nc file.

    Reprojecting velocities from one map projection to another is done by
    reprojecting the vector distances. In this process, absolute velocities
    might change as well, because map projections do not always preserve
    distances -> we scale them back to the original velocities, as per the
    ITS_LIVE documentation, which states that velocities are given in
    ground units, i.e. absolute velocities.

    We use bilinear interpolation to reproject the velocities to the local
    glacier map.

    If you want more velocity products, feel free to open a new topic
    on OGGM's issue tracker!

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    add_error : bool
        also reproject and scale the error data
    """

    if not gdir.has_file('gridded_data'):
        raise InvalidWorkflowError('Please run `glacier_masks` before running '
                                   'this task')

    _reproject_and_scale(gdir, do_error=False)
    if add_error:
        _reproject_and_scale(gdir, do_error=True)
Example No. 7
def calibrate_inversion_from_consensus(gdirs, ignore_missing=True,
                                       fs=0, a_bounds=(0.1, 10),
                                       apply_fs_on_mismatch=False,
                                       error_on_mismatch=True):
    """Fit the total volume of the glaciers to the 2019 consensus estimate.

    This method finds the "best Glen A" to match all glaciers in gdirs with
    a valid inverted volume.

    Parameters
    ----------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the glacier directories to process
    ignore_missing : bool
        set this to true to silence the error if some glaciers could not be
        found in the consensus estimate.
    fs : float
        invert with sliding (default: no)
    a_bounds : tuple
        bounds of the multiplicative factor applied to the default A
    apply_fs_on_mismatch : bool
        on mismatch, try to apply an arbitrary value of fs (fs = 5.7e-20 from
        Oerlemans) and try to optimize A again.
    error_on_mismatch : bool
        sometimes the given bounds do not allow finding a zero mismatch:
        this will normally raise an error, but you can switch it off to
        use the closest value instead and move on.

    Returns
    -------
    a dataframe with the individual glacier volumes
    """

    gdirs = utils.tolist(gdirs)

    # Get the ref data for the glaciers we have
    df = pd.read_hdf(utils.get_demo_file('rgi62_itmix_df.h5'))
    rids = [gdir.rgi_id for gdir in gdirs]

    found_ids = df.index.intersection(rids)
    if not ignore_missing and (len(found_ids) != len(rids)):
        raise InvalidWorkflowError('Could not find matching indices in the '
                                   'consensus estimate for all provided '
                                   'glaciers. Set ignore_missing=True to '
                                   'ignore this error.')

    df = df.reindex(rids)

    # Optimize the diff to ref
    def_a = cfg.PARAMS['inversion_glen_a']

    def compute_vol(x):
        inversion_tasks(gdirs, glen_a=x*def_a, fs=fs)
        odf = df.copy()
        odf['oggm'] = execute_entity_task(tasks.get_inversion_volume, gdirs)
        return odf.dropna()

    def to_minimize(x):
        log.workflow('Consensus estimate optimisation with '
                     'A factor: {} and fs: {}'.format(x, fs))
        odf = compute_vol(x)
        return odf.vol_itmix_m3.sum() - odf.oggm.sum()

    try:
        out_fac, r = optimization.brentq(to_minimize, *a_bounds, rtol=1e-2,
                                         full_output=True)
        if r.converged:
            log.workflow('calibrate_inversion_from_consensus '
                         'converged after {} iterations and fs={}. The '
                         'resulting Glen A factor is {}.'
                         ''.format(r.iterations, fs, out_fac))
        else:
            raise ValueError('Unexpected error in optimization.brentq')
    except ValueError:
        # Ok can't find an A. Log for debug:
        odf1 = compute_vol(a_bounds[0]).sum() * 1e-9
        odf2 = compute_vol(a_bounds[1]).sum() * 1e-9
        msg = ('calibration from consensus estimate CANNOT converge with fs={}.\n'
               'Bound values (km3):\nRef={:.3f} OGGM={:.3f} for A factor {}\n'
               'Ref={:.3f} OGGM={:.3f} for A factor {}'
               ''.format(fs,
                         odf1.vol_itmix_m3, odf1.oggm, a_bounds[0],
                         odf2.vol_itmix_m3, odf2.oggm, a_bounds[1]))
        if apply_fs_on_mismatch and fs == 0 and odf2.oggm > odf2.vol_itmix_m3:
            return calibrate_inversion_from_consensus(gdirs,
                                                      ignore_missing=ignore_missing,
                                                      fs=5.7e-20, a_bounds=a_bounds,
                                                      apply_fs_on_mismatch=False,
                                                      error_on_mismatch=error_on_mismatch)
        if error_on_mismatch:
            raise ValueError(msg)

        out_fac = a_bounds[int(abs(odf1.vol_itmix_m3 - odf1.oggm) >
                               abs(odf2.vol_itmix_m3 - odf2.oggm))]
        log.workflow(msg)
        log.workflow('We use A factor = {} and fs = {} and move on.'
                     ''.format(out_fac, fs))

    # Compute the final volume with the correct A
    inversion_tasks(gdirs, glen_a=out_fac*def_a, fs=fs)
    df['vol_oggm_m3'] = execute_entity_task(tasks.get_inversion_volume, gdirs)
    return df
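
A usage sketch; the returned dataframe carries the reference ('vol_itmix_m3') and calibrated OGGM ('vol_oggm_m3') volumes per glacier:

from oggm import workflow

df = workflow.calibrate_inversion_from_consensus(gdirs,
                                                 apply_fs_on_mismatch=True)
# Totals in km^3, reference vs. calibrated:
print(df['vol_itmix_m3'].sum() * 1e-9, df['vol_oggm_m3'].sum() * 1e-9)
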
Example No. 8
def match_geodetic_mb_for_selection(gdirs,
                                    period='2000-01-01_2020-01-01',
                                    file_path=None,
                                    fail_safe=False):
    """Shift the mass-balance residual to match geodetic mb observations.

    It is similar to match_regional_geodetic_mb but uses the raw,
    glacier-per-glacier tabular data.

    This method finds the "best mass-balance residual" to match all glaciers in
    gdirs with available OGGM mass balance and available geodetic mass-balance
    measurements from Hugonnet 2021 or any other file with the same format.

    The default is to use hugonnet_2021_ds_rgi60_pergla_rates_10_20_worldwide_filled.hdf
    in https://cluster.klima.uni-bremen.de/~oggm/geodetic_ref_mb/

    Parameters
    ----------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the glacier directories to process
    period : str
       One of
       '2000-01-01_2020-01-01',
       '2000-01-01_2010-01-01',
       '2010-01-01_2020-01-01'.
    file_path : str
       local file path to a tabular file containing the geodetic
       measurements; the file must contain the columns:
           - 'rgiid': the RGIId as in RGI 6.0
           - 'period': time interval of the measurements in the format shown
             above
           - 'dmdtda': the specific-mass change rate in meters water-equivalent
             per year
           - 'area': the glacier area (same as in RGI 6.0) in square meters
    fail_safe : bool
        some glaciers in the obs data have been corrected with the regional
        average. We don't use these values, unless there is no other choice and
        in which case you can set fail_safe to True
    """

    # Get the mass-balance OGGM would give out of the box
    df = utils.compile_fixed_geometry_mass_balance(gdirs, path=False)
    df = df.dropna(axis=0, how='all').dropna(axis=1, how='all')

    # And also the Area and calving fluxes
    dfs = utils.compile_glacier_statistics(gdirs, path=False)

    y0 = int(period.split('_')[0].split('-')[0])
    y1 = int(period.split('_')[1].split('-')[0]) - 1

    odf = pd.DataFrame(df.loc[y0:y1].mean(), columns=['SMB'])

    odf['AREA'] = dfs.rgi_area_km2 * 1e6
    # Just take the calving rate and change its units
    # Original units: km3 a-1, to change to mm a-1 (units of specific MB)
    rho = cfg.PARAMS['ice_density']
    if 'calving_flux' in dfs:
        odf['CALVING'] = dfs['calving_flux'].fillna(
            0) * 1e9 * rho / odf['AREA']
    else:
        odf['CALVING'] = 0

    # We have to drop nans here, which occur when calving glaciers fail to run
    odf = odf.dropna()

    # save all rgi_ids for which a valid OGGM mb is available
    rgi_ids_oggm = odf.index.values

    # Fetch the reference data
    df = utils.get_geodetic_mb_dataframe(file_path=file_path)

    # get the correct period from the whole dataset
    df = df.loc[df['period'] == period]

    # get only geodetic measurements for which a valid OGGM mb is available
    rdf_all = df.loc[rgi_ids_oggm]
    if rdf_all.empty:
        raise InvalidWorkflowError('No geodetic MB measurements available for '
                                   'this glacier selection!')

    # drop glaciers with no valid geodetic measurements
    rdf = rdf_all.loc[~rdf_all['is_cor']]
    if rdf.empty:
        if not fail_safe:
            raise InvalidWorkflowError(
                'No geodetic MB measurements available for '
                'this glacier selection! Set '
                'fail_safe=True to use the '
                'corrected values.')
        rdf = rdf_all

    # the remaining glaciers now have an OGGM mb and geodetic measurements
    rgi_ids = rdf.index.values
    msg = ('Applying geodetic MB correction using {} of {} glaciers, with '
           'available OGGM MB and available geodetic measurements.')
    log.workflow(msg.format(len(rgi_ids), len(gdirs)))

    # Total MB OGGM, only using glaciers with OGGM mb and geodetic measurements
    odf = odf.loc[rgi_ids]
    out_smb = np.average(odf['SMB'], weights=odf['AREA'])  # for logging
    out_cal = np.average(odf['CALVING'], weights=odf['AREA'])  # for logging
    smb_oggm = np.average(odf['SMB'] - odf['CALVING'], weights=odf['AREA'])

    # Total geodetic MB, no need for indexing
    smb_ref = rdf.dmdtda.values * 1000  # m to mm conversion
    area_ref = rdf.area.values
    smb_ref = np.average(smb_ref, weights=area_ref)

    # Diff between the two
    residual = smb_ref - smb_oggm

    # Let's just shift
    log.workflow('Shifting regional MB bias by {}'.format(residual))
    log.workflow('Observations give {}'.format(smb_ref))
    log.workflow('OGGM SMB gives {}'.format(out_smb))
    log.workflow('OGGM frontal ablation gives {}'.format(out_cal))

    # This time we shift over all glaciers
    for gdir in gdirs:
        try:
            df = gdir.read_json('local_mustar')
            gdir.add_to_diagnostics('mb_bias_before_geodetic_corr', df['bias'])
            df['bias'] = df['bias'] - residual
            gdir.write_json(df, 'local_mustar')
        except FileNotFoundError:
            pass
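
A usage sketch, assuming ``gdirs`` have calibrated mass-balance files; the shift happens in place:

from oggm import workflow

workflow.match_geodetic_mb_for_selection(gdirs,
                                         period='2000-01-01_2020-01-01')
# Each glacier's 'local_mustar' bias is updated in place; the previous
# value is kept in the diagnostics as 'mb_bias_before_geodetic_corr'.
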
Example No. 9
def _reproject_and_scale(gdir, do_error=False):
    """Reproject and scale itslive data, avoid code duplication for error"""


    reg = find_region(gdir)
    if reg is None:
        raise InvalidWorkflowError('There does not seem to be its_live data '
                                   'available for this glacier')

    vnx = 'vx'
    vny = 'vy'
    if do_error:
        vnx += '_err'
        vny += '_err'

    with utils.get_lock():
        fx = utils.file_downloader(region_files[reg][vnx])
        fy = utils.file_downloader(region_files[reg][vny])

    # Open the files
    dsx = salem.GeoTiff(fx)
    dsy = salem.GeoTiff(fy)
    # subset them to our map
    grid_gla = gdir.grid.center_grid
    proj_vel = dsx.grid.proj
    x0, x1, y0, y1 = grid_gla.extent_in_crs(proj_vel)
    dsx.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    dsy.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    grid_vel = dsx.grid.center_grid

    # TODO: this should be taken care of by salem
    # https://github.com/fmaussion/salem/issues/171
    with rasterio.Env():
        with rasterio.open(fx) as src:
            nodata = getattr(src, 'nodata', -32767.0)

    # The error files have a wrong nodata value (0)
    if nodata == 0:
        nodata = -32767.0

    # Get the coords at t0
    xx0, yy0 = grid_vel.center_grid.xy_coordinates

    # Compute coords at t1
    xx1 = dsx.get_vardata()
    yy1 = dsy.get_vardata()
    non_valid = (xx1 == nodata) | (yy1 == nodata)
    xx1[non_valid] = np.NaN
    yy1[non_valid] = np.NaN
    orig_vel = np.sqrt(xx1**2 + yy1**2)
    xx1 += xx0
    yy1 += yy0

    # Transform both to glacier proj
    xx0, yy0 = salem.transform_proj(proj_vel, grid_gla.proj, xx0, yy0)
    xx1, yy1 = salem.transform_proj(proj_vel, grid_gla.proj, xx1, yy1)

    # Correct no data after proj as well (inf)
    xx1[non_valid] = np.NaN
    yy1[non_valid] = np.NaN

    # Compute velocities from there
    vx = xx1 - xx0
    vy = yy1 - yy0

    # Scale back velocities - https://github.com/OGGM/oggm/issues/1014
    new_vel = np.sqrt(vx**2 + vy**2)
    p_ok = new_vel > 1e-5  # avoid div by zero
    vx[p_ok] = vx[p_ok] * orig_vel[p_ok] / new_vel[p_ok]
    vy[p_ok] = vy[p_ok] * orig_vel[p_ok] / new_vel[p_ok]

    # And transform to local map
    vx = grid_gla.map_gridded_data(vx, grid=grid_vel, interp='linear')
    vy = grid_gla.map_gridded_data(vy, grid=grid_vel, interp='linear')

    # Write
    with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:
        vn = 'obs_icevel_x'
        if do_error:
            vn = vn.replace('obs', 'err')
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        ln = 'ITS LIVE velocity data in x map direction'
        if do_error:
            ln = 'Uncertainty of ' + ln
        v.long_name = ln
        v[:] = vx.filled(np.nan)

        vn = 'obs_icevel_y'
        if do_error:
            vn = vn.replace('obs', 'err')
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        ln = 'ITS LIVE velocity data in y map direction'
        if do_error:
            ln = 'Uncertainty of ' + ln
        v.long_name = ln
        v[:] = vy.filled(np.nan)
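
The heart of the scale-back step (see issue #1014) can be illustrated standalone: after reprojection each vector keeps its direction, but its length is reset to the original ground-unit magnitude. Synthetic numbers below:

import numpy as np

orig_vel = np.array([10.0, 50.0])  # magnitudes in the source projection
vx = np.array([6.2, 28.0])         # reprojected x components (distorted)
vy = np.array([8.5, 41.0])         # reprojected y components (distorted)
new_vel = np.sqrt(vx**2 + vy**2)
p_ok = new_vel > 1e-5              # avoid division by zero
vx[p_ok] *= orig_vel[p_ok] / new_vel[p_ok]
vy[p_ok] *= orig_vel[p_ok] / new_vel[p_ok]
assert np.allclose(np.sqrt(vx**2 + vy**2), orig_vel)
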
Example No. 10
    def __init__(self,
                 gdir,
                 fls=None,
                 mu_star=None,
                 mb_model_class=PastMassBalance,
                 use_inversion_flowlines=False,
                 input_filesuffix='',
                 bias=None,
                 **kwargs):
        """Initialize.

        Parameters
        ----------
        gdir : GlacierDirectory
            the glacier directory
        mu_star : float or list of floats, optional
            set to the alternative value of mu* you want to use
            (the default is to use the calibrated value). Give a list of values
            for flowline-specific mu*
        fls : list, optional
            list of flowline objects to use (defaults to 'model_flowlines',
            and if not available, to 'inversion_flowlines')
        mb_model_class : class, optional
            the mass-balance model to use (e.g. PastMassBalance,
            ConstantMassBalance...)
        use_inversion_flowlines: bool, optional
            if True 'inversion_flowlines' instead of 'model_flowlines' will be
            used.
        input_filesuffix : str
            the file suffix of the input climate file
        bias : float, optional
            set to the alternative value of the calibration bias [mm we yr-1]
            you want to use (the default is to use the calibrated value)
            Note that this bias is *subtracted* from the computed MB. Indeed:
            BIAS = MODEL_MB - REFERENCE_MB.
        kwargs : kwargs to pass to mb_model_class
        """

        # Read in the flowlines
        if use_inversion_flowlines:
            fls = gdir.read_pickle('inversion_flowlines')

        if fls is None:
            try:
                fls = gdir.read_pickle('model_flowlines')
            except FileNotFoundError:
                raise InvalidWorkflowError('Need a valid `model_flowlines` '
                                           'file. If you explicitly want to '
                                           'use `inversion_flowlines`, set '
                                           'use_inversion_flowlines=True.')

        self.fls = fls
        _y0 = kwargs.get('y0', None)

        # User mu*?
        if mu_star is not None:
            mu_star = tolist(mu_star, length=len(fls))
            for fl, mu in zip(self.fls, mu_star):
                fl.mu_star = mu

        # Initialise the mb models
        self.flowline_mb_models = []
        for fl in self.fls:
            # Merged glaciers will need different climate files, use filesuffix
            if (fl.rgi_id is not None) and (fl.rgi_id != gdir.rgi_id):
                rgi_filesuffix = '_' + fl.rgi_id + input_filesuffix
            else:
                rgi_filesuffix = input_filesuffix

            # merged glaciers also have a different MB bias from calibration
            if ((bias is None) and cfg.PARAMS['use_bias_for_run']
                    and (fl.rgi_id != gdir.rgi_id)):
                df = gdir.read_json('local_mustar', filesuffix='_' + fl.rgi_id)
                fl_bias = df['bias']
            else:
                fl_bias = bias

            # Constant and RandomMassBalance need y0 if not provided
            if (issubclass(mb_model_class, RandomMassBalance)
                    or issubclass(mb_model_class, ConstantMassBalance)) and (
                        fl.rgi_id != gdir.rgi_id) and (_y0 is None):

                df = gdir.read_json('local_mustar', filesuffix='_' + fl.rgi_id)
                kwargs['y0'] = df['t_star']

            self.flowline_mb_models.append(
                mb_model_class(gdir,
                               mu_star=fl.mu_star,
                               bias=fl_bias,
                               input_filesuffix=rgi_filesuffix,
                               **kwargs))

        self.valid_bounds = self.flowline_mb_models[-1].valid_bounds
        self.hemisphere = gdir.hemisphere
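
This constructor appears to belong to OGGM's MultipleFlowlineMassBalance; a construction sketch under that assumption, with ``gdir`` a fully preprocessed glacier directory:

from oggm.core.massbalance import (ConstantMassBalance,
                                   MultipleFlowlineMassBalance)

mb = MultipleFlowlineMassBalance(gdir, use_inversion_flowlines=True,
                                 mb_model_class=ConstantMassBalance)
print(len(mb.flowline_mb_models))  # one MB model per flowline
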
Example No. 11
    def __init__(self,
                 gdir,
                 mu_star=None,
                 bias=None,
                 filename='climate_historical',
                 input_filesuffix='',
                 repeat=False,
                 ys=None,
                 ye=None,
                 check_calib_params=True):
        """Initialize.

        Parameters
        ----------
        gdir : GlacierDirectory
            the glacier directory
        mu_star : float, optional
            set to the alternative value of mu* you want to use
            (the default is to use the calibrated value).
        bias : float, optional
            set to the alternative value of the calibration bias [mm we yr-1]
            you want to use (the default is to use the calibrated value)
            Note that this bias is *subtracted* from the computed MB. Indeed:
            BIAS = MODEL_MB - REFERENCE_MB.
        filename : str, optional
            set to a different BASENAME if you want to use alternative climate
            data.
        input_filesuffix : str
            the file suffix of the input climate file
        repeat : bool
            Whether the climate period given by [ys, ye] should be repeated
            indefinitely in a circular way
        ys : int
            The start of the climate period where the MB model is valid
            (default: the period with available data)
        ye : int
            The end of the climate period where the MB model is valid
            (default: the period with available data)
        check_calib_params : bool
            OGGM will try hard not to use wrongly calibrated mu* by checking
            the parameters used during calibration and the ones you are
            using at run time. If they don't match, it will raise an error.
            Set to False to suppress this check.

        Attributes
        ----------
        temp_bias : float, default 0
            Add a temperature bias to the time series
        prcp_bias : float, default 1
            Precipitation factor to the time series (called bias for
            consistency with `temp_bias`)
        """

        super(PastMassBalance, self).__init__()
        self.valid_bounds = [-1e4, 2e4]  # in m
        if mu_star is None:
            df = gdir.read_json('local_mustar')
            mu_star = df['mu_star_glacierwide']
            if check_calib_params:
                if not df['mu_star_allsame']:
                    msg = ('You seem to use the glacier-wide mu* to compute '
                           'the mass-balance although this glacier has '
                           'different mu* for its flowlines. Set '
                           '`check_calib_params=False` to prevent this '
                           'error.')
                    raise InvalidWorkflowError(msg)

        if bias is None:
            if cfg.PARAMS['use_bias_for_run']:
                df = gdir.read_json('local_mustar')
                bias = df['bias']
            else:
                bias = 0.

        self.mu_star = mu_star
        self.bias = bias

        # Parameters
        self.t_solid = cfg.PARAMS['temp_all_solid']
        self.t_liq = cfg.PARAMS['temp_all_liq']
        self.t_melt = cfg.PARAMS['temp_melt']
        prcp_fac = cfg.PARAMS['prcp_scaling_factor']
        default_grad = cfg.PARAMS['temp_default_gradient']

        # Check the climate related params to the GlacierDir to make sure
        if check_calib_params:
            mb_calib = gdir.get_climate_info()['mb_calib_params']
            for k, v in mb_calib.items():
                if v != cfg.PARAMS[k]:
                    msg = ('You seem to use different mass-balance parameters '
                           'than used for the calibration. Set '
                           '`check_calib_params=False` to ignore this '
                           'warning.')
                    raise InvalidWorkflowError(msg)

        # Public attrs
        self.hemisphere = gdir.hemisphere
        self.temp_bias = 0.
        self.prcp_bias = 1.
        self.repeat = repeat

        # Read file
        fpath = gdir.get_filepath(filename, filesuffix=input_filesuffix)
        with ncDataset(fpath, mode='r') as nc:
            # time
            time = nc.variables['time']
            time = netCDF4.num2date(time[:], time.units)
            ny, r = divmod(len(time), 12)
            if r != 0:
                raise ValueError('Climate data should be N full years')
            # This is where we switch to hydro float year format
            # Last year gives the tone of the hydro year
            self.years = np.repeat(
                np.arange(time[-1].year - ny + 1, time[-1].year + 1), 12)
            self.months = np.tile(np.arange(1, 13), ny)
            # Read timeseries
            self.temp = nc.variables['temp'][:]
            self.prcp = nc.variables['prcp'][:] * prcp_fac
            if 'gradient' in nc.variables:
                grad = nc.variables['gradient'][:]
                # Security for stuff that can happen with local gradients
                g_minmax = cfg.PARAMS['temp_local_gradient_bounds']
                grad = np.where(~np.isfinite(grad), default_grad, grad)
                grad = clip_array(grad, g_minmax[0], g_minmax[1])
            else:
                grad = self.prcp * 0 + default_grad
            self.grad = grad
            self.ref_hgt = nc.ref_hgt
            self.ys = self.years[0] if ys is None else ys
            self.ye = self.years[-1] if ye is None else ye
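
A query sketch, assuming a calibrated ``gdir``; the ``get_annual_mb`` call and the unit conversion follow OGGM's usual conventions (m of ice per second, converted here to kg m-2 yr-1) and should be treated as assumptions:

import numpy as np
from oggm import cfg
from oggm.core.massbalance import PastMassBalance

mb = PastMassBalance(gdir)
heights = np.linspace(2000, 3500, 50)          # illustrative elevations (m)
mb_ice = mb.get_annual_mb(heights, year=2000)  # m of ice per second
mb_we = mb_ice * cfg.SEC_IN_YEAR * cfg.PARAMS['ice_density']  # kg m-2 yr-1
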
Example No. 12
def calibrate_inversion_from_consensus_estimate(gdirs, ignore_missing=False):
    """Fit the total volume of the glaciers to the 2019 consensus estimate.

    This method finds the "best Glen A" to match all glaciers in gdirs with
    a valid inverted volume.

    Parameters
    ----------
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        the glacier directories to process
    ignore_missing : bool
        set this to true to silence the error if some glaciers could not be
        found in the consensus estimate.

    Returns
    -------
    a dataframe with the individual glacier volumes
    """

    gdirs = utils.tolist(gdirs)

    # Get the ref data for the glaciers we have
    df = pd.read_hdf(utils.get_demo_file('rgi62_itmix_df.h5'))
    rids = [gdir.rgi_id for gdir in gdirs]

    found_ids = df.index.intersection(rids)
    if not ignore_missing and (len(found_ids) != len(rids)):
        raise InvalidWorkflowError('Could not find matching indices in the '
                                   'consensus estimate for all provided '
                                   'glaciers. Set ignore_missing=True to '
                                   'ignore this error.')

    df = df.reindex(rids)

    def_a = cfg.PARAMS['inversion_glen_a']
    a_bounds = [0.1, 10]

    # Optimize the diff to ref
    def to_minimize(x):

        cfg.PARAMS['inversion_glen_a'] = x * def_a
        vols = execute_entity_task(tasks.mass_conservation_inversion, gdirs)
        _df = df.copy()
        _df['oggm'] = vols
        _df = _df.dropna()
        return _df.vol_itmix_m3.sum() - _df.oggm.sum()

    out_fac, r = optimization.brentq(to_minimize,
                                     *a_bounds,
                                     rtol=1e-2,
                                     full_output=True)
    if r.converged:
        log.workflow('calibrate_inversion_from_consensus_estimate '
                     'converged after {} iterations. The resulting Glen A '
                     'factor is {}.'.format(r.iterations, out_fac))
    else:
        raise RuntimeError('Unexpected error')

    # Compute the final volume with the correct A
    cfg.PARAMS['inversion_glen_a'] = out_fac * def_a
    vols = execute_entity_task(tasks.mass_conservation_inversion, gdirs)
    df['vol_oggm_m3'] = vols
    return df
Example No. 13
def execute_entity_task(task, gdirs, **kwargs):
    """Execute a task on gdirs.

    If you asked for multiprocessing, it will do it.

    If ``task`` has more arguments than ``gdir``, they have to be keyword
    arguments.

    Parameters
    ----------
    task : function or sequence of functions
         The entity task(s) to apply.
         Can be None, in which case each gdir is expected to be a tuple of (task, gdir).
         When passing a sequence, each item can also optionally be a tuple of (task, dictionary).
         In this case the dictionary items will be passed to the task as kwargs.
    gdirs : list of :py:class:`oggm.GlacierDirectory` objects
        The glacier directories to process.
        Each individual gdir can optionally be a tuple of (gdir, dictionary).
        In this case, the values in the dictionary will be passed to the task as
        keyword arguments for that specific gdir.

    Returns
    -------
    List of results from the task, or from the last task if a sequence of
    tasks was given.
    """

    # Normalize task into list of tuples for simplicity
    if not isinstance(task, Sequence):
        task = [task]
    tasks = []
    for t in task:
        if isinstance(t, tuple):
            tasks.append(t)
        else:
            tasks.append((t, {}))

    # Reject global tasks
    for t in tasks:
        if t[0].__dict__.get('is_global_task', False):
            raise InvalidWorkflowError('execute_entity_task cannot be used on '
                                       'global tasks.')

    # Should be iterable
    gdirs = utils.tolist(gdirs)
    ng = len(gdirs)
    if ng == 0:
        log.workflow('Called execute_entity_task on 0 glaciers. Returning...')
        return

    log.workflow('Execute entity tasks [%s] on %d glaciers',
                 ', '.join([t[0].__name__ for t in tasks]), ng)

    pc = _pickle_copier(tasks, kwargs)

    if _have_ogmpi:
        if ogmpi.OGGM_MPI_COMM is not None:
            return ogmpi.mpi_master_spin_tasks(pc, gdirs)

    if cfg.PARAMS['use_multiprocessing'] and ng > 1:
        mppool = init_mp_pool(cfg.CONFIG_MODIFIED)
        out = mppool.map(pc, gdirs, chunksize=1)
    else:
        if ng > 3:
            log.workflow(
                'WARNING: you are trying to run an entity task on '
                '%d glaciers with multiprocessing turned off. OGGM '
                'will run faster with multiprocessing turned on.', ng)
        out = [pc(gdir) for gdir in gdirs]

    return out
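
A sketch of the extended call patterns this variant documents; ``my_task`` and ``some_kwarg`` are hypothetical placeholders:

from oggm import workflow, tasks

# A sequence of tasks, optionally paired with task-specific kwargs:
out = workflow.execute_entity_task(
    [tasks.glacier_masks, (my_task, {'some_kwarg': 1})], gdirs)

# Per-gdir kwargs: pair each gdir with its own dictionary instead:
out = workflow.execute_entity_task(
    my_task, [(g, {'some_kwarg': i}) for i, g in enumerate(gdirs)])
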
Example No. 14
def velocity_to_gdir(gdir):
    """Reproject the its_live files to the given glacier directory.

    Variables are added to the gridded_data nc file.

    Reprojecting velocities from one map projection to another is done by
    reprojecting the vector distances. In this process, absolute velocities
    might change as well, because map projections do not always preserve
    distances -> we scale them back to the original velocities, as per the
    ITS_LIVE documentation, which states that velocities are given in
    ground units, i.e. absolute velocities.

    We use bilinear interpolation to reproject the velocities to the local
    glacier map.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data

    """

    reg = find_region(gdir)
    if reg is None:
        raise InvalidWorkflowError('There does not seem to be its_live data '
                                   'available for this glacier')

    if not gdir.has_file('gridded_data'):
        raise InvalidWorkflowError('Please run `glacier_masks` before running '
                                   'this task')

    with utils.get_lock():
        fx = utils.file_downloader(region_files[reg]['vx'])
        fy = utils.file_downloader(region_files[reg]['vy'])

    # Open the files
    dsx = salem.GeoTiff(fx)
    dsy = salem.GeoTiff(fy)
    # subset them to our map
    grid_gla = gdir.grid.center_grid
    proj_vel = dsx.grid.proj
    x0, x1, y0, y1 = grid_gla.extent_in_crs(proj_vel)
    dsx.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    dsy.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    grid_vel = dsx.grid.center_grid

    # TODO: this should be taken care of by salem
    # https://github.com/fmaussion/salem/issues/171
    with rasterio.Env():
        with rasterio.open(fx) as src:
            nodata = getattr(src, 'nodata', -32767.0)

    # Get the coords at t0
    xx0, yy0 = grid_vel.center_grid.xy_coordinates

    # Compute coords at t1
    xx1 = dsx.get_vardata()
    yy1 = dsy.get_vardata()
    non_valid = (xx1 == nodata) | (yy1 == nodata)
    xx1[non_valid] = np.NaN
    yy1[non_valid] = np.NaN
    orig_vel = np.sqrt(xx1**2 + yy1**2)
    xx1 += xx0
    yy1 += yy0

    # Transform both to glacier proj
    xx0, yy0 = salem.transform_proj(proj_vel, grid_gla.proj, xx0, yy0)
    xx1, yy1 = salem.transform_proj(proj_vel, grid_gla.proj, xx1, yy1)

    # Correct no data after proj as well (inf)
    xx1[non_valid] = np.NaN
    yy1[non_valid] = np.NaN

    # Compute velocities from there
    vx = xx1 - xx0
    vy = yy1 - yy0

    # Scale back velocities - https://github.com/OGGM/oggm/issues/1014
    new_vel = np.sqrt(vx**2 + vy**2)
    p_ok = new_vel > 0.1  # avoid div by zero
    vx[p_ok] = vx[p_ok] * orig_vel[p_ok] / new_vel[p_ok]
    vy[p_ok] = vy[p_ok] * orig_vel[p_ok] / new_vel[p_ok]

    # And transform to local map
    vx = grid_gla.map_gridded_data(vx, grid=grid_vel, interp='linear')
    vy = grid_gla.map_gridded_data(vy, grid=grid_vel, interp='linear')

    # Write
    with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:
        vn = 'obs_icevel_x'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        v.long_name = 'ITS LIVE velocity data in x map direction'
        v[:] = vx

        vn = 'obs_icevel_y'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        v.long_name = 'ITS LIVE velocity data in y map direction'
        v[:] = vy