Example #1
def region_grid(reg):
    """Return the salem grid of an ITS_LIVE region, cached after first use."""

    global region_grids

    if reg not in region_grids:
        with utils.get_lock():
            fp = utils.file_downloader(region_files[reg]['vx'])
            ds = salem.GeoTiff(fp)
            region_grids[reg] = ds.grid

    return region_grids[reg]
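
A quick usage sketch (the region key 'ALA' is hypothetical; `region_files` and `region_grids` are module-level dicts assumed to be set up elsewhere in this module):

# Hypothetical usage: the first call downloads the vx GeoTiff and caches its grid
grid = region_grid('ALA')
# Later calls for the same region hit the cache and trigger no download
assert region_grid('ALA') is grid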
Example #2
def process_lmr_data(gdir,
                     fpath_temp=None,
                     fpath_precip=None,
                     year_range=('1951', '1980'),
                     filesuffix='',
                     **kwargs):
    """Read, process and store the Last Millennium Reanalysis (LMR) data for this glacier.

    LMR data: https://atmos.washington.edu/~hakim/lmr/LMRv2/

    The LMR data are annual anomalies relative to 1951-1980. We create a
    synthetic monthly timeseries by adding them to the reference climate's
    monthly climatology.

    It stores the data in a format that can be used by the OGGM mass balance
    model and in the glacier directory.

    Parameters
    ----------
    fpath_temp : str
        path to the temp file (default: LMR v2.1 from server above)
    fpath_precip : str
        path to the precip file (default: LMR v2.1 from server above)
    year_range : tuple of str
        the year range for which you want to compute the anomalies. Default
        for LMR is `('1951', '1980')`
    filesuffix : str
        append a suffix to the filename (useful for ensemble experiments).

    **kwargs: any kwarg to be passed to :py:func:`process_gcm_data`
    """

    # Get the path of GCM temperature & precipitation data
    base_url = 'https://atmos.washington.edu/%7Ehakim/lmr/LMRv2/'
    if fpath_temp is None:
        with utils.get_lock():
            fpath_temp = utils.file_downloader(
                base_url + 'air_MCruns_ensemble_mean_LMRv2.1.nc')
    if fpath_precip is None:
        with utils.get_lock():
            fpath_precip = utils.file_downloader(
                base_url + 'prate_MCruns_ensemble_mean_LMRv2.1.nc')

    # Glacier location
    glon = gdir.cenlon
    glat = gdir.cenlat

    # Read the GCM files
    with xr.open_dataset(fpath_temp, use_cftime=True) as tempds, \
            xr.open_dataset(fpath_precip, use_cftime=True) as precipds:

        # Check longitude conventions
        if tempds.lon.min() >= 0 and glon <= 0:
            glon += 360

        # Take the closest to the glacier
        # Should we consider GCM interpolation?
        temp = tempds.air.sel(lat=glat, lon=glon, method='nearest')
        precip = precipds.prate.sel(lat=glat, lon=glon, method='nearest')

        # Currently we just take the mean of the ensemble, although
        # this is probably not advised. The anomaly correction against
        # the reference climate will compensate for this anyway
        temp = temp.mean(dim='MCrun')
        precip = precip.mean(dim='MCrun')

        # Precip unit is kg m-2 s-1; we convert to mm per month (using an
        # average month length of 30.5 days) since we apply the anomaly after
        precip = precip * 30.5 * (60 * 60 * 24)

        # Back to [-180, 180] for OGGM
        temp.lon.values = temp.lon if temp.lon <= 180 else temp.lon - 360
        precip.lon.values = precip.lon if precip.lon <= 180 else precip.lon - 360

    # OK now we have to turn these annual timeseries in monthly data
    # We take the ref climate
    fpath = gdir.get_filepath('climate_historical')
    with xr.open_dataset(fpath) as ds_ref:
        ds_ref = ds_ref.sel(time=slice(*year_range))

        loc_tmp = ds_ref.temp.groupby('time.month').mean()
        loc_pre = ds_ref.prcp.groupby('time.month').mean()

        # Make time coord
        t = np.cumsum([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] *
                      len(temp))
        t = cftime.num2date(np.append([0], t[:-1]),
                            'days since 0000-01-01 00:00:00',
                            calendar='noleap')

        temp = xr.DataArray(
            (loc_tmp.data + temp.data[:, np.newaxis]).flatten(),
            coords={
                'time': t,
                'lon': temp.lon,
                'lat': temp.lat
            },
            dims=('time', ))

        # For precip the std dev is very small - let's keep it as is for now,
        # questionable as that may be. We clip to zero here to be sure
        precip = utils.clip_min(
            (loc_pre.data + precip.data[:, np.newaxis]).flatten(), 0)
        precip = xr.DataArray(precip,
                              dims=('time', ),
                              coords={
                                  'time': t,
                                  'lon': temp.lon,
                                  'lat': temp.lat
                              })

    process_gcm_data(gdir,
                     filesuffix=filesuffix,
                     prcp=precip,
                     temp=temp,
                     year_range=year_range,
                     calendar='noleap',
                     source='lmr',
                     **kwargs)
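
The core of the monthly reconstruction above is a NumPy broadcast: each annual anomaly is added to the 12-value monthly reference climatology, and the resulting (n_years, 12) array is flattened into a monthly series. A self-contained sketch of that step, with toy numbers:

import numpy as np

# Monthly reference climatology (12 values) and annual anomalies (3 years)
monthly_clim = np.array([-10., -8., -4., 1., 6., 10., 12., 11., 7., 2., -4., -8.])
annual_anom = np.array([0.5, -0.2, 0.1])

# (3, 1) + (12,) broadcasts to (3, 12); flattening gives 36 monthly values
monthly_series = (annual_anom[:, np.newaxis] + monthly_clim).flatten()
assert monthly_series.shape == (36,)

In a workflow, the task itself would simply be called on an initialized glacier directory, e.g. process_lmr_data(gdir, filesuffix='_lmr') (suffix hypothetical).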
Example #3
def _reproject_and_scale(gdir, do_error=False):
    """Reproject and scale ITS_LIVE data (shared by the velocity and error
    fields to avoid code duplication)."""

    reg = find_region(gdir)
    if reg is None:
        raise InvalidWorkflowError('There does not seem to be its_live data '
                                   'available for this glacier')

    vnx = 'vx'
    vny = 'vy'
    if do_error:
        vnx += '_err'
        vny += '_err'

    with utils.get_lock():
        fx = utils.file_downloader(region_files[reg][vnx])
        fy = utils.file_downloader(region_files[reg][vny])

    # Open the files
    dsx = salem.GeoTiff(fx)
    dsy = salem.GeoTiff(fy)
    # subset them to our map
    grid_gla = gdir.grid.center_grid
    proj_vel = dsx.grid.proj
    x0, x1, y0, y1 = grid_gla.extent_in_crs(proj_vel)
    dsx.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    dsy.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    grid_vel = dsx.grid.center_grid

    # TODO: this should be taken care of by salem
    # https://github.com/fmaussion/salem/issues/171
    with rasterio.Env():
        with rasterio.open(fx) as src:
            nodata = src.nodata

    # The error files carry a missing or wrong nodata value - fix it here
    if nodata is None or nodata == 0:
        nodata = -32767.0

    # Get the coords at t0
    xx0, yy0 = grid_vel.center_grid.xy_coordinates

    # Compute coords at t1
    xx1 = dsx.get_vardata()
    yy1 = dsy.get_vardata()
    non_valid = (xx1 == nodata) | (yy1 == nodata)
    xx1[non_valid] = np.nan
    yy1[non_valid] = np.nan
    orig_vel = np.sqrt(xx1**2 + yy1**2)
    xx1 += xx0
    yy1 += yy0

    # Transform both to glacier proj
    xx0, yy0 = salem.transform_proj(proj_vel, grid_gla.proj, xx0, yy0)
    xx1, yy1 = salem.transform_proj(proj_vel, grid_gla.proj, xx1, yy1)

    # Correct no data after proj as well (inf)
    xx1[non_valid] = np.nan
    yy1[non_valid] = np.nan

    # Compute velocities from there
    vx = xx1 - xx0
    vy = yy1 - yy0

    # Scale back velocities - https://github.com/OGGM/oggm/issues/1014
    new_vel = np.sqrt(vx**2 + vy**2)
    p_ok = new_vel > 1e-5  # avoid div by zero
    vx[p_ok] = vx[p_ok] * orig_vel[p_ok] / new_vel[p_ok]
    vy[p_ok] = vy[p_ok] * orig_vel[p_ok] / new_vel[p_ok]

    # And transform to local map
    vx = grid_gla.map_gridded_data(vx, grid=grid_vel, interp='linear')
    vy = grid_gla.map_gridded_data(vy, grid=grid_vel, interp='linear')

    # Write
    with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:
        vn = 'obs_icevel_x'
        if do_error:
            vn = vn.replace('obs', 'err')
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        ln = 'ITS LIVE velocity data in x map direction'
        if do_error:
            ln = 'Uncertainty of ' + ln
        v.long_name = ln
        v[:] = vx.filled(np.nan)

        vn = 'obs_icevel_y'
        if do_error:
            vn = vn.replace('obs', 'err')
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        ln = 'ITS LIVE velocity data in y map direction'
        if do_error:
            ln = 'Uncertainty of ' + ln
        v.long_name = ln
        v[:] = vy.filled(np.nan)
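
The rescaling step above keeps the reprojected vectors' directions but restores the original ground-unit magnitudes (see the linked OGGM issue). A standalone sketch of the operation, with a hypothetical helper name:

import numpy as np

def rescale_to_original_speed(vx, vy, orig_vel, eps=1e-5):
    """Rescale (vx, vy) so their magnitude matches orig_vel while
    keeping their direction; eps avoids division by zero."""
    new_vel = np.sqrt(vx**2 + vy**2)
    p_ok = new_vel > eps
    vx, vy = vx.copy(), vy.copy()
    vx[p_ok] *= orig_vel[p_ok] / new_vel[p_ok]
    vy[p_ok] *= orig_vel[p_ok] / new_vel[p_ok]
    return vx, vy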
Example #4
def velocity_to_gdir(gdir):
    """Reproject the its_live files to the given glacier directory.

    Variables are added to the gridded_data nc file.

    Reprojecting velocities from one map proj to another is done
    reprojecting the vector distances. In this process, absolute velocities
    might change as well because map projections do not always preserve
    distances -> we scale them back to the original velocities as per the
    ITS_LIVE documentation that states that velocities are given in
    ground units, i.e. absolute velocities.

    We use bilinear interpolation to reproject the velocities to the local
    glacier map.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data

    """

    reg = find_region(gdir)
    if reg is None:
        raise InvalidWorkflowError('There does not seem to be its_live data '
                                   'available for this glacier')

    if not gdir.has_file('gridded_data'):
        raise InvalidWorkflowError('Please run `glacier_masks` before running '
                                   'this task')

    with utils.get_lock():
        fx = utils.file_downloader(region_files[reg]['vx'])
        fy = utils.file_downloader(region_files[reg]['vy'])

    # Open the files
    dsx = salem.GeoTiff(fx)
    dsy = salem.GeoTiff(fy)
    # subset them to our map
    grid_gla = gdir.grid.center_grid
    proj_vel = dsx.grid.proj
    x0, x1, y0, y1 = grid_gla.extent_in_crs(proj_vel)
    dsx.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    dsy.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    grid_vel = dsx.grid.center_grid

    # TODO: this should be taken care of by salem
    # https://github.com/fmaussion/salem/issues/171
    with rasterio.Env():
        with rasterio.open(fx) as src:
            nodata = src.nodata

    # Fall back to the default nodata value if the file does not define one
    if nodata is None:
        nodata = -32767.0

    # Get the coords at t0
    xx0, yy0 = grid_vel.center_grid.xy_coordinates

    # Compute coords at t1
    xx1 = dsx.get_vardata()
    yy1 = dsy.get_vardata()
    non_valid = (xx1 == nodata) | (yy1 == nodata)
    xx1[non_valid] = np.nan
    yy1[non_valid] = np.nan
    orig_vel = np.sqrt(xx1**2 + yy1**2)
    xx1 += xx0
    yy1 += yy0

    # Transform both to glacier proj
    xx0, yy0 = salem.transform_proj(proj_vel, grid_gla.proj, xx0, yy0)
    xx1, yy1 = salem.transform_proj(proj_vel, grid_gla.proj, xx1, yy1)

    # Correct no data after proj as well (inf)
    xx1[non_valid] = np.nan
    yy1[non_valid] = np.nan

    # Compute velocities from there
    vx = xx1 - xx0
    vy = yy1 - yy0

    # Scale back velocities - https://github.com/OGGM/oggm/issues/1014
    new_vel = np.sqrt(vx**2 + vy**2)
    p_ok = new_vel > 0.1  # avoid div by zero
    vx[p_ok] = vx[p_ok] * orig_vel[p_ok] / new_vel[p_ok]
    vy[p_ok] = vy[p_ok] * orig_vel[p_ok] / new_vel[p_ok]

    # And transform to local map
    vx = grid_gla.map_gridded_data(vx, grid=grid_vel, interp='linear')
    vy = grid_gla.map_gridded_data(vy, grid=grid_vel, interp='linear')

    # Write
    with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:
        vn = 'obs_icevel_x'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        v.long_name = 'ITS LIVE velocity data in x map direction'
        v[:] = vx

        vn = 'obs_icevel_y'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        v.long_name = 'ITS LIVE velocity data in y map direction'
        v[:] = vy
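
A hypothetical end-to-end usage sketch, assuming `gdirs` is a list of initialized oggm.GlacierDirectory objects on which glacier_masks has already been run, and that this function is registered as an OGGM entity task:

from oggm import workflow
import xarray as xr

# Reproject the ITS_LIVE velocities into each glacier's gridded_data file
workflow.execute_entity_task(velocity_to_gdir, gdirs)

# Read the result back and compute the speed field for the first glacier
with xr.open_dataset(gdirs[0].get_filepath('gridded_data')) as ds:
    speed = (ds['obs_icevel_x'] ** 2 + ds['obs_icevel_y'] ** 2) ** 0.5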