Example #1
    def test_read_to_grid(self):

        g = GeoTiff(utils.get_demo_file('hef_srtm.tif'))
        sf = utils.get_demo_file('Hintereisferner_UTM.shp')

        df1 = read_shapefile_to_grid(sf, g.grid)

        df2 = transform_geopandas(read_shapefile(sf), to_crs=g.grid)
        assert_allclose(df1.geometry[0].exterior.coords,
                        df2.geometry[0].exterior.coords)

        # test for caching
        d = g.grid.to_dict()
        # rebuild the dict: key ordering may change by chance
        d2 = dict((k, v) for k, v in d.items())

        from salem.sio import _memory_shapefile_to_grid, cached_shapefile_path
        shape_cpath = cached_shapefile_path(sf)
        res = _memory_shapefile_to_grid.call_and_shelve(shape_cpath,
                                                        grid=g.grid,
                                                        **d)
        h1 = res.argument_hash
        res = _memory_shapefile_to_grid.call_and_shelve(shape_cpath,
                                                        grid=g.grid,
                                                        **d2)
        h2 = res.argument_hash
        self.assertEqual(h1, h2)
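A minimal standalone sketch of the pattern this test exercises (reading a shapefile and reprojecting it onto a raster's grid), assuming salem and its bundled demo files are installed:

import salem
from salem import utils

# The GeoTIFF's .grid carries the raster's projection and extent
g = salem.GeoTiff(utils.get_demo_file('hef_srtm.tif'))

# to_crs accepts a salem.Grid as well as a pyproj projection; the
# returned geometries are in grid (pixel) coordinates
df = salem.read_shapefile(utils.get_demo_file('Hintereisferner_UTM.shp'))
df_on_grid = salem.transform_geopandas(df, to_crs=g.grid)
print(df_on_grid.total_bounds)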
Example #2
    def test_read_to_grid(self):

        g = GeoTiff(utils.get_demo_file("hef_srtm.tif"))
        sf = utils.get_demo_file("Hintereisferner_UTM.shp")

        df1 = read_shapefile_to_grid(sf, g.grid)

        df2 = transform_geopandas(read_shapefile(sf), to_crs=g.grid)
        assert_allclose(df1.geometry[0].exterior.coords, df2.geometry[0].exterior.coords)
Example #3
    def test_read_to_grid(self):

        g = GeoTiff(utils.get_demo_file('hef_srtm.tif'))
        sf = utils.get_demo_file('Hintereisferner_UTM.shp')

        df1 = read_shapefile_to_grid(sf, g.grid)

        df2 = transform_geopandas(read_shapefile(sf), to_crs=g.grid)
        assert_allclose(df1.geometry[0].exterior.coords,
                        df2.geometry[0].exterior.coords)
Example #4
def catchment_intersections(gdir, div_id=None):
    """Computes the intersections between the catchments.

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
    """

    catchment_indices = gdir.read_pickle('catchment_indices', div_id=div_id)
    xmesh, ymesh = np.meshgrid(np.arange(0, gdir.grid.nx, 1),
                               np.arange(0, gdir.grid.ny, 1))

    # Loop over the lines
    mask = np.zeros((gdir.grid.ny, gdir.grid.nx))

    gdfc = gpd.GeoDataFrame()
    for i, ci in enumerate(catchment_indices):
        # Catchment polygon
        mask[:] = 0
        mask[tuple(ci.T)] = 1
        _, poly_no = _mask_to_polygon(mask, x=xmesh, y=ymesh, gdir=gdir)
        gdfc.loc[i, 'geometry'] = poly_no

    gdfi = polygon_intersections(gdfc)

    # We project them onto the mercator proj before writing. This is a bit
    # inefficient (they'll be projected back later), but it's more sustainable
    gdfc.crs = gdir.grid
    gdfi.crs = gdir.grid
    salem.transform_geopandas(gdfc, gdir.grid.proj, inplace=True)
    salem.transform_geopandas(gdfi, gdir.grid.proj, inplace=True)
    if hasattr(gdfc.crs, 'srs'):
        # salem uses pyproj
        gdfc.crs = gdfc.crs.srs
        gdfi.crs = gdfi.crs.srs
    gdfc.to_file(gdir.get_filepath('flowline_catchments', div_id=div_id))
    if len(gdfi) > 0:
        gdfi.to_file(gdir.get_filepath('catchments_intersects',
                                       div_id=div_id))
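polygon_intersections is an OGGM helper whose implementation is not shown here; the idea, finding where catchment polygons touch each other, can be sketched with plain geopandas/shapely. This is an illustrative re-implementation, not OGGM's actual code:

import geopandas as gpd
from shapely.geometry import Polygon

def pairwise_intersections(gdf):
    # Collect the non-empty intersection of every unordered polygon pair
    rows = []
    for i, gi in gdf.geometry.items():
        for j, gj in gdf.geometry.items():
            if j <= i:
                continue
            inter = gi.intersection(gj)
            if not inter.is_empty:
                rows.append({'id_1': i, 'id_2': j, 'geometry': inter})
    return gpd.GeoDataFrame(rows, columns=['id_1', 'id_2', 'geometry'])

# Two unit squares sharing an edge: the intersection is that shared edge
gdf = gpd.GeoDataFrame(geometry=[
    Polygon([(0, 0), (1, 0), (1, 1), (0, 1)]),
    Polygon([(1, 0), (2, 0), (2, 1), (1, 1)]),
])
print(pairwise_intersections(gdf))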
Example #5
def catchment_intersections(gdir, div_id=None):
    """Computes the intersections between the catchments.

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
    """

    catchment_indices = gdir.read_pickle('catchment_indices', div_id=div_id)
    xmesh, ymesh = np.meshgrid(np.arange(0, gdir.grid.nx, 1),
                               np.arange(0, gdir.grid.ny, 1))

    # Loop over the lines
    mask = np.zeros((gdir.grid.ny, gdir.grid.nx))

    gdfc = gpd.GeoDataFrame()
    for i, ci in enumerate(catchment_indices):
        # Catchment polygon
        mask[:] = 0
        mask[tuple(ci.T)] = 1
        _, poly_no = _mask_to_polygon(mask, x=xmesh, y=ymesh, gdir=gdir)
        gdfc.loc[i, 'geometry'] = poly_no

    gdfi = utils.polygon_intersections(gdfc)

    # We project them onto the mercator proj before writing. This is a bit
    # inefficient (they'll be projected back later), but it's more sustainable
    gdfc.crs = gdir.grid
    gdfi.crs = gdir.grid
    salem.transform_geopandas(gdfc, gdir.grid.proj, inplace=True)
    salem.transform_geopandas(gdfi, gdir.grid.proj, inplace=True)
    if hasattr(gdfc.crs, 'srs'):
        # salem uses pyproj
        gdfc.crs = gdfc.crs.srs
        gdfi.crs = gdfi.crs.srs
    gdfc.to_file(gdir.get_filepath('flowline_catchments', div_id=div_id))
    if len(gdfi) > 0:
        gdfi.to_file(gdir.get_filepath('catchments_intersects',
                                       div_id=div_id))
Example #6
File: tasks.py Project: OGGM/g2ti
def define_g2ti_glacier(path=None, base_dir=None):

    fname = os.path.join(path, 'outlines.shp')
    ent = gpd.read_file(fname)
    rid = ent.RGIId.values[0]
    if '5a' in rid:
        rid = rid.replace('5a', '60')

    ent['RGIId'] = rid
    ent['Name'] = '' if ent['Name'][0] == 'None' else ent['Name']
    gdir = utils.GlacierDirectory(ent.iloc[0], base_dir=base_dir)
    ent.to_file(gdir.get_filepath('outlines'))

    proj_out = salem.check_crs(ent.crs)

    # Also transform the intersects if necessary
    gdf = cfg.PARAMS['intersects_gdf']
    if len(gdf) > 0:
        gdf = gdf.loc[((gdf.RGIId_1 == gdir.rgi_id) |
                       (gdf.RGIId_2 == gdir.rgi_id))]
        if len(gdf) > 0:
            gdf = salem.transform_geopandas(gdf, to_crs=proj_out)
            if hasattr(gdf.crs, 'srs'):
                # salem uses pyproj
                gdf.crs = gdf.crs.srs
            gdf.to_file(gdir.get_filepath('intersects'))
    else:
        # Sanity check
        if cfg.PARAMS['use_intersects']:
            raise RuntimeError('You seem to have forgotten to set the '
                               'intersects file for this run. OGGM works '
                               'better with such a file. If you know what '
                               'you are doing, set '
                               "cfg.PARAMS['use_intersects'] = False to "
                               "suppress this error.")

    # Topo
    shutil.copy(os.path.join(path, 'dem.tif'), gdir.get_filepath('dem'))
    mpath = gdir.get_filepath('dem').replace('dem', 'g2ti_mask')
    shutil.copy(os.path.join(path, 'mask.tif'), mpath)

    # Grid
    ds = salem.GeoTiff(gdir.get_filepath('dem'))
    ds.grid.to_json(gdir.get_filepath('glacier_grid'))
    gdir.write_pickle(['G2TI'], 'dem_source')

    return gdir
Example #7
def define_glacier_region(gdir, entity=None):
    """
    Very first task: define the glacier's local grid.

    Defines the local projection (Transverse Mercator), centered on the
    glacier. There are several options to set the resolution of the local grid.
    It can be adapted depending on the size of the glacier with::

        dx (m) = d1 * AREA (km) + d2 ; clipped to dmax

    or be set to a fixed value. See ``params.cfg`` for setting these options.
    Default values of the adapted mode lead to a resolution of 50 m for
    Hintereisferner, which is approx. 8 km2 in area.
    After defining the grid, the topography and the outlines of the glacier
    are transformed into the local projection. The default interpolation for
    the topography is `cubic`.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    entity : geopandas GeoSeries
        the glacier geometry to process
    """

    # choose a spatial resolution with respect to the glacier area
    dxmethod = cfg.PARAMS['grid_dx_method']
    area = gdir.rgi_area_km2
    if dxmethod == 'linear':
        dx = np.rint(cfg.PARAMS['d1'] * area + cfg.PARAMS['d2'])
    elif dxmethod == 'square':
        dx = np.rint(cfg.PARAMS['d1'] * np.sqrt(area) + cfg.PARAMS['d2'])
    elif dxmethod == 'fixed':
        dx = np.rint(cfg.PARAMS['fixed_dx'])
    else:
        raise ValueError('grid_dx_method not supported: {}'.format(dxmethod))
    # Additional trick for varying dx
    if dxmethod in ['linear', 'square']:
        dx = np.clip(dx, cfg.PARAMS['d2'], cfg.PARAMS['dmax'])

    log.debug('%s: area %.2f km, dx=%.1f', gdir.rgi_id, area, dx)

    # Make a local glacier map
    proj_params = dict(name='tmerc',
                       lat_0=0.,
                       lon_0=gdir.cenlon,
                       k=0.9996,
                       x_0=0,
                       y_0=0,
                       datum='WGS84')
    proj4_str = "+proj={name} +lat_0={lat_0} +lon_0={lon_0} +k={k} " \
                "+x_0={x_0} +y_0={y_0} +datum={datum}".format(**proj_params)
    proj_in = pyproj.Proj("+init=EPSG:4326", preserve_units=True)
    proj_out = pyproj.Proj(proj4_str, preserve_units=True)
    project = partial(pyproj.transform, proj_in, proj_out)
    # transform geometry to map
    geometry = shapely.ops.transform(project, entity['geometry'])
    geometry = _check_geometry(geometry)
    xx, yy = geometry.exterior.xy

    # Corners, incl. a buffer of N pix
    ulx = np.min(xx) - cfg.PARAMS['border'] * dx
    lrx = np.max(xx) + cfg.PARAMS['border'] * dx
    uly = np.max(yy) + cfg.PARAMS['border'] * dx
    lry = np.min(yy) - cfg.PARAMS['border'] * dx
    # n pixels
    nx = int((lrx - ulx) / dx)
    ny = int((uly - lry) / dx)

    # Back to lon, lat for DEM download/preparation
    tmp_grid = salem.Grid(proj=proj_out,
                          nxny=(nx, ny),
                          x0y0=(ulx, uly),
                          dxdy=(dx, -dx),
                          pixel_ref='corner')
    minlon, maxlon, minlat, maxlat = tmp_grid.extent_in_crs(crs=salem.wgs84)

    # save transformed geometry to disk
    entity = entity.copy()
    entity['geometry'] = geometry
    # Avoid fiona bug: https://github.com/Toblerity/Fiona/issues/365
    for k, s in entity.iteritems():
        if type(s) in [np.int32, np.int64]:
            entity[k] = int(s)
    towrite = gpd.GeoDataFrame(entity).T
    towrite.crs = proj4_str
    # Delete the source before writing
    if 'DEM_SOURCE' in towrite:
        del towrite['DEM_SOURCE']
    towrite.to_file(gdir.get_filepath('outlines'))

    # Also transform the intersects if necessary
    gdf = cfg.PARAMS['intersects_gdf']
    gdf = gdf.loc[(gdf.RGIId_1 == gdir.rgi_id) | (gdf.RGIId_2 == gdir.rgi_id)]
    if len(gdf) > 0:
        gdf = salem.transform_geopandas(gdf, to_crs=proj_out)
        if hasattr(gdf.crs, 'srs'):
            # salem uses pyproj
            gdf.crs = gdf.crs.srs
        gdf.to_file(gdir.get_filepath('intersects'))

    # Open DEM
    source = entity.DEM_SOURCE if hasattr(entity, 'DEM_SOURCE') else None
    dem_list, dem_source = get_topo_file((minlon, maxlon), (minlat, maxlat),
                                         rgi_region=gdir.rgi_region,
                                         source=source)
    log.debug('%s: DEM source: %s', gdir.rgi_id, dem_source)

    # A glacier area can cover more than one tile:
    if len(dem_list) == 1:
        dem_dss = [rasterio.open(dem_list[0])]  # if one tile, just open it
        dem_data = rasterio.band(dem_dss[0], 1)
        src_transform = dem_dss[0].transform
    else:
        dem_dss = [rasterio.open(s) for s in dem_list]  # list of rasters
        dem_data, src_transform = merge_tool(dem_dss)  # merged rasters

    # Use Grid properties to create a transform (see rasterio cookbook)
    dst_transform = rasterio.transform.from_origin(
        ulx,
        uly,
        dx,
        dx  # sign change (2nd dx) is done by rasterio.transform
    )

    # Set up profile for writing output
    profile = dem_dss[0].profile
    profile.update({
        'crs': proj4_str,
        'transform': dst_transform,
        'width': nx,
        'height': ny
    })

    # Could be extended so that the cfg file takes all Resampling.* methods
    if cfg.PARAMS['topo_interp'] == 'bilinear':
        resampling = Resampling.bilinear
    elif cfg.PARAMS['topo_interp'] == 'cubic':
        resampling = Resampling.cubic
    else:
        raise ValueError('{} interpolation not understood'.format(
            cfg.PARAMS['topo_interp']))

    dem_reproj = gdir.get_filepath('dem')
    with rasterio.open(dem_reproj, 'w', **profile) as dest:
        dst_array = np.empty((ny, nx), dtype=dem_dss[0].dtypes[0])
        reproject(
            # Source parameters
            source=dem_data,
            src_crs=dem_dss[0].crs,
            src_transform=src_transform,
            # Destination parameters
            destination=dst_array,
            dst_transform=dst_transform,
            dst_crs=proj4_str,
            # Configuration
            resampling=resampling)

        dest.write(dst_array, 1)

    for dem_ds in dem_dss:
        dem_ds.close()

    # Glacier grid
    x0y0 = (ulx + dx / 2, uly - dx / 2)  # To pixel center coordinates
    glacier_grid = salem.Grid(proj=proj_out,
                              nxny=(nx, ny),
                              dxdy=(dx, -dx),
                              x0y0=x0y0)
    glacier_grid.to_json(gdir.get_filepath('glacier_grid'))
    gdir.write_pickle(dem_source, 'dem_source')

    # Looks in the database if the glacier has divides.
    gdf = cfg.PARAMS['divides_gdf']
    if gdir.rgi_id in gdf.index.values:
        divdf = [g for g in gdf.loc[gdir.rgi_id].geometry]

        # Reproject the shape
        def proj(lon, lat):
            return salem.transform_proj(salem.wgs84, gdir.grid.proj, lon, lat)

        divdf = [shapely.ops.transform(proj, g) for g in divdf]

        # Keep only the ones large enough
        log.debug('%s: divide candidates: %d', gdir.rgi_id, len(divdf))
        divdf = [g for g in divdf if (g.area >= (25 * dx**2))]
        log.debug('%s: number of divides: %d', gdir.rgi_id, len(divdf))

        # Write the directories and the files
        for i, g in enumerate(divdf):
            _dir = os.path.join(gdir.dir, 'divide_{0:0=2d}'.format(i + 1))
            if not os.path.exists(_dir):
                os.makedirs(_dir)
            # File
            entity['geometry'] = g
            towrite = gpd.GeoDataFrame(entity).T
            towrite.crs = proj4_str
            towrite.to_file(os.path.join(_dir, cfg.BASENAMES['outlines']))
    else:
        # Make a single directory and link the files
        log.debug('%s: number of divides: %d', gdir.rgi_id, 1)
        _dir = os.path.join(gdir.dir, 'divide_01')
        if not os.path.exists(_dir):
            os.makedirs(_dir)
        linkname = os.path.join(_dir, cfg.BASENAMES['outlines'])
        sourcename = gdir.get_filepath('outlines')
        for ending in ['.cpg', '.dbf', '.shp', '.shx', '.prj']:
            _s = sourcename.replace('.shp', ending)
            _l = linkname.replace('.shp', ending)
            if os.path.exists(_s):
                try:
                    # we are on UNIX
                    os.link(_s, _l)
                except AttributeError:
                    # we are on windows
                    copyfile(_s, _l)
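The grid-resolution logic at the top of this task is worth a worked example. The parameter values below (d1=5, d2=10, dmax=100) are illustrative assumptions, not OGGM's actual defaults; with them, an 8 km2 glacier such as Hintereisferner gets the 50 m resolution the docstring mentions:

import numpy as np

def choose_dx(area_km2, method='linear', d1=5.0, d2=10.0,
              dmax=100.0, fixed_dx=50.0):
    # Mirrors the branching above: dx (m) = d1 * AREA (km2) + d2,
    # clipped to [d2, dmax]; parameter values are illustrative only
    if method == 'linear':
        dx = np.rint(d1 * area_km2 + d2)
    elif method == 'square':
        dx = np.rint(d1 * np.sqrt(area_km2) + d2)
    elif method == 'fixed':
        return float(np.rint(fixed_dx))
    else:
        raise ValueError('grid_dx_method not supported: {}'.format(method))
    return float(np.clip(dx, d2, dmax))

print(choose_dx(8.0))   # 5 * 8 + 10 = 50.0 m
print(choose_dx(80.0))  # 410 -> clipped to 100.0 m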
Example #8
def distance_from_border(gdir, sqrt=False):
    """Computes a normalized distance from border mask.

    It is a bit cleverer than just taking the glacier outlines: we also
    consider where the glacier has neighbors.

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
        the working glacier directory
    sqrt : bool, optional
        whether the square root of the distance mask should be used or not

    Returns
    -------
    the normalized distance-from-border array (2D, NaN outside the glacier)
    """

    # Variables
    grids_file = gdir.get_filepath('gridded_data')
    with netCDF4.Dataset(grids_file) as nc:
        glacier_mask = nc.variables['glacier_mask'][:]

    # Glacier exterior including nunataks
    erode = binary_erosion(glacier_mask)
    glacier_ext = glacier_mask ^ erode
    glacier_ext = np.where(glacier_mask == 1, glacier_ext, 0)

    # Intersects between glaciers
    gdfi = gpd.GeoDataFrame(columns=['geometry'])
    if gdir.has_file('intersects'):
        # read and transform to grid
        gdf = gdir.read_shapefile('intersects')
        salem.transform_geopandas(gdf, gdir.grid, inplace=True)
        gdfi = pd.concat([gdfi, gdf[['geometry']]])

    dx = gdir.grid.dx

    # Here we check which grid points are on ice divides
    # Probably not the fastest way to do this, but it works
    dist = np.array([])
    jj, ii = np.where(glacier_ext)
    for j, i in zip(jj, ii):
        dist = np.append(dist, np.min(gdfi.distance(shpg.Point(i, j))))
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        pok = np.where(dist <= 1)
    glacier_ext_intersect = glacier_ext * 0
    glacier_ext_intersect[jj[pok], ii[pok]] = 1

    # Scipy does the job
    dis_from_border = 1 + glacier_ext_intersect - glacier_ext
    dis_from_border = distance_transform_edt(dis_from_border) * dx
    dis_from_border[np.where(glacier_mask == 0)] = np.NaN

    # Square?
    if sqrt:
        dis_from_border = np.sqrt(dis_from_border)

    # We normalize and return
    return dis_from_border / np.nansum(dis_from_border)
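The heart of this routine is scipy's Euclidean distance transform: distance_transform_edt returns, for every nonzero pixel, the distance to the nearest zero, so zeroing the exterior ring makes each interior pixel carry its distance to the border. A toy sketch on a synthetic mask (the dx value is illustrative):

import numpy as np
from scipy.ndimage import binary_erosion, distance_transform_edt

# 1 = glacier, 0 = outside
mask = np.zeros((7, 7), dtype=int)
mask[1:6, 1:6] = 1

# Exterior ring = mask minus its binary erosion (as in the code above)
ext = mask ^ binary_erosion(mask)

# Zeros on the ring force distance 0 there; interior pixels get their
# Euclidean distance to the ring, scaled by the grid resolution
dist = distance_transform_edt(1 - ext) * 50.0  # dx = 50 m, illustrative
dist[mask == 0] = np.nan
print(np.nanmax(dist))  # 100.0: the centre pixel is 2 px from the border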
Example #9
def catchment_width_geom(gdir, div_id=None):
    """Compute geometrical catchment widths for each point of the flowlines.

    Updates the 'inversion_flowlines' save file.

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
    """

    # variables
    flowlines = gdir.read_pickle('inversion_flowlines', div_id=div_id)
    catchment_indices = gdir.read_pickle('catchment_indices', div_id=div_id)
    xmesh, ymesh = np.meshgrid(np.arange(0, gdir.grid.nx, 1),
                               np.arange(0, gdir.grid.ny, 1))

    # Topography is to filter the unrealistic lines afterwards.
    # I take the non-smoothed topography
    # I remove the boundary pixs because they are likely to be higher
    fpath = gdir.get_filepath('gridded_data', div_id=div_id)
    with netCDF4.Dataset(fpath) as nc:
        topo = nc.variables['topo'][:]
        mask_ext = nc.variables['glacier_ext'][:]
        mask_glacier = nc.variables['glacier_mask'][:]
    topo[np.where(mask_glacier == 0)] = np.NaN
    topo[np.where(mask_ext == 1)] = np.NaN

    # Intersects between catchments/glaciers
    gdfi = gpd.GeoDataFrame(columns=['geometry'])
    if gdir.has_file('catchments_intersects', div_id=div_id):
        # read and transform to grid
        gdf = gpd.read_file(gdir.get_filepath('catchments_intersects',
                                              div_id=div_id))
        salem.transform_geopandas(gdf, gdir.grid, inplace=True)
        gdfi = pd.concat([gdfi, gdf[['geometry']]])
    if gdir.has_file('divides_intersects', div_id=0):
        # read and transform to grid
        gdf = gpd.read_file(gdir.get_filepath('divides_intersects'))
        salem.transform_geopandas(gdf, gdir.grid, inplace=True)
        gdfi = pd.concat([gdfi, gdf[['geometry']]])
    if gdir.has_file('intersects', div_id=0):
        # read and transform to grid
        gdf = gpd.read_file(gdir.get_filepath('intersects', div_id=0))
        salem.transform_geopandas(gdf, gdir.grid, inplace=True)
        gdfi = pd.concat([gdfi, gdf[['geometry']]])

    # apply a buffer to be sure we get the intersects right. Be generous
    gdfi = gdfi.buffer(1.5)

    # Filter parameters
    # Number of pixels to arbitrarily remove at junctions
    jpix = int(cfg.PARAMS['flowline_junction_pix'])

    # Loop over the lines
    mask = np.zeros((gdir.grid.ny, gdir.grid.nx))
    for fl, ci in zip(flowlines, catchment_indices):

        n = len(fl.dis_on_line)

        widths = np.zeros(n)
        wlines = []

        # Catchment polygon
        mask[:] = 0
        mask[tuple(ci.T)] = 1
        poly, poly_no = _mask_to_polygon(mask, x=xmesh, y=ymesh, gdir=gdir)

        # First guess widths
        for i, (normal, pcoord) in enumerate(zip(fl.normals, fl.line.coords)):
            width, wline = _point_width(normal, pcoord, fl, poly, poly_no)
            widths[i] = width
            wlines.append(wline)

        valid = np.where(np.isfinite(widths))
        if len(valid[0]) == 0:
            errmsg = '{}: first guess widths went wrong.'.format(gdir.rgi_id)
            raise RuntimeError(errmsg)

        # Ok now the entire centerline is computed.
        # I take all these widths for geometrically valid, and see if they
        # intersect with our buffered catchment/glacier intersections
        touches_border = []
        for wg in wlines:
            touches_border.append(np.any(gdfi.intersects(wg)))
        touches_border = _filter_grouplen(touches_border, minsize=5)

        # we filter the lines which have a large altitude range
        fil_widths = _filter_for_altitude_range(widths, wlines, topo)

        # Filter +- widths at junction points
        for fid in fl.inflow_indices:
            i0 = np.clip(fid-jpix, jpix/2, n-jpix/2).astype(np.int64)
            i1 = np.clip(fid+jpix+1, jpix/2, n-jpix/2).astype(np.int64)
            fil_widths[i0:i1] = np.NaN

        valid = np.where(np.isfinite(fil_widths))
        if len(valid[0]) == 0:
            # This happens very rarely. Just pick the middle and
            # the correction task should do the rest
            log.warning('{}: width filtering too strong.'.format(gdir.rgi_id))
            # fill all points with the middle width (keeps length n)
            fil_widths = widths * 0 + widths[int(len(widths) / 2)]

        # "Touches border" is a badly chosen name. In fact, it should be
        # called "is_rectangular" since tidewater glaciers have a special
        # treatment here
        if gdir.is_tidewater and fl.flows_to is None:
            touches_border[-5:] = True

        # Write it in the objects attributes
        assert len(fil_widths) == n
        fl.widths = fil_widths
        fl.geometrical_widths = wlines
        fl.touches_border = touches_border

    # Overwrite pickle
    gdir.write_pickle(flowlines, 'inversion_flowlines', div_id=div_id)
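The touches_border test above is plain geopandas once everything is in grid coordinates: buffer the intersection lines (1.5 grid pixels here, to be generous) and ask whether a width line crosses the buffer. A toy sketch with made-up coordinates:

import geopandas as gpd
from shapely.geometry import LineString

# An 'intersects' line in grid coordinates, buffered to be generous
divides = gpd.GeoSeries([LineString([(5, 0), (5, 10)])]).buffer(1.5)

w_near = LineString([(3, 5), (4.2, 5)])  # ends 0.8 px from the divide
w_far = LineString([(0, 5), (2, 5)])     # 3 px away

print(divides.intersects(w_near).any())  # True: within the 1.5 px buffer
print(divides.intersects(w_far).any())   # False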
Example #10
File: tasks.py Project: OGGM/g2ti
def _add_2d_interp_masks(gdir):
    """Computes the interpolation masks and adds them to the file."""

    # Variables
    grids_file = gdir.get_filepath('gridded_data')
    with netCDF4.Dataset(grids_file) as nc:
        topo = nc.variables['topo_smoothed'][:]
        glacier_mask = nc.variables['glacier_mask'][:]

    # Glacier exterior including nunataks
    erode = binary_erosion(glacier_mask)
    glacier_ext = glacier_mask ^ erode
    glacier_ext = np.where(glacier_mask == 1, glacier_ext, 0)

    # Intersects between glaciers
    gdfi = gpd.GeoDataFrame(columns=['geometry'])
    if gdir.has_file('intersects'):
        # read and transform to grid
        gdf = gpd.read_file(gdir.get_filepath('intersects'))
        salem.transform_geopandas(gdf, gdir.grid, inplace=True)
        gdfi = pd.concat([gdfi, gdf[['geometry']]])

    dx = gdir.grid.dx

    # Distance from border mask
    # Probably not the fastest way to do this, but it works
    dist = np.array([])
    jj, ii = np.where(glacier_ext)
    for j, i in zip(jj, ii):
        dist = np.append(dist, np.min(gdfi.distance(shpg.Point(i, j))))

    with warnings.catch_warnings():
        # https://github.com/Unidata/netcdf4-python/issues/766
        warnings.filterwarnings("ignore", category=RuntimeWarning)
        pok = np.where(dist <= 1)
    glacier_ext_intersect = glacier_ext * 0
    glacier_ext_intersect[jj[pok], ii[pok]] = 1

    # Scipy does the job
    dis_from_border = 1 + glacier_ext_intersect - glacier_ext
    dis_from_border = distance_transform_edt(dis_from_border) * dx
    dis_from_border[np.where(glacier_mask == 0)] = np.NaN

    # Slope mask
    sy, sx = np.gradient(topo, dx, dx)
    slope = np.arctan(np.sqrt(sy**2 + sx**2))
    slope = np.clip(slope, np.deg2rad(cfg.PARAMS['min_slope'] * 2), np.pi / 2.)
    slope = 1 / slope**(cfg.N / (cfg.N + 2))
    slope = np.where(glacier_mask == 1, slope, np.NaN)

    # Area above or below mask
    topo = np.where(glacier_mask == 1, topo, np.NaN)
    dis_from_minmax = topo * 0.
    jj, ii = np.nonzero(glacier_mask == 1)

    with warnings.catch_warnings():
        # https://github.com/Unidata/netcdf4-python/issues/766
        warnings.filterwarnings("ignore", category=RuntimeWarning)
        for j, i in zip(jj, ii):
            area_above = np.sum(topo > topo[j, i])
            area_below = np.sum(topo < topo[j, i])
            dis_from_minmax[j, i] = np.min([area_above, area_below])

    # smooth a little bit
    dis_from_minmax = dis_from_minmax**0.5

    assert not np.isfinite(slope[0, 0])
    assert not np.isfinite(dis_from_minmax[0, 0])
    assert not np.isfinite(dis_from_border[0, 0])  # write

    with netCDF4.Dataset(grids_file, 'a') as nc:

        vn = 'slope_mask'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f8', (
                'y',
                'x',
            ))
        v.units = '-'
        v.long_name = 'Local slope (normalized)'
        v[:] = slope / np.nanmean(slope)

        vn = 'topo_mask'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f8', (
                'y',
                'x',
            ))
        v.units = '-'
        v.long_name = 'Distance from top or bottom (normalized)'
        v[:] = dis_from_minmax / np.nanmean(dis_from_minmax)

        vn = 'distance_mask'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f8', (
                'y',
                'x',
            ))
        v.units = '-'
        v.long_name = 'Distance from glacier border (normalized)'
        v[:] = dis_from_border / np.nanmean(dis_from_border)
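Each mask above is written with the same create-or-reuse boilerplate. That pattern factors into a small helper; a sketch (the helper name is made up, nc is a netCDF4.Dataset opened in append mode with existing 'y'/'x' dimensions):

def set_grid_var(nc, name, data, units='-', long_name='', dtype='f8'):
    # Reuse the variable if present, otherwise create it on ('y', 'x'),
    # then (re)write the data and its attributes
    if name in nc.variables:
        v = nc.variables[name]
    else:
        v = nc.createVariable(name, dtype, ('y', 'x'))
    v.units = units
    v.long_name = long_name
    v[:] = data
    return v

# e.g. set_grid_var(nc, 'distance_mask',
#                   dis_from_border / np.nanmean(dis_from_border),
#                   long_name='Distance from glacier border (normalized)')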
Example #11
def define_glacier_region(gdir, entity=None):
    """Very first task: define the glacier's local grid.

    Defines the local projection (Transverse Mercator), centered on the
    glacier. There are several options to set the resolution of the local grid.
    It can be adapted depending on the size of the glacier with::

        dx (m) = d1 * AREA (km) + d2 ; clipped to dmax

    or be set to a fixed value. See ``params.cfg`` for setting these options.
    Default values of the adapted mode lead to a resolution of 50 m for
    Hintereisferner, which is approx. 8 km2 in area.
    After defining the grid, the topography and the outlines of the glacier
    are transformed into the local projection. The default interpolation for
    the topography is `cubic`.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    entity : geopandas.GeoSeries
        the glacier geometry to process
    """

    # Make a local glacier map
    proj_params = dict(name='tmerc', lat_0=0., lon_0=gdir.cenlon,
                       k=0.9996, x_0=0, y_0=0, datum='WGS84')
    proj4_str = "+proj={name} +lat_0={lat_0} +lon_0={lon_0} +k={k} " \
                "+x_0={x_0} +y_0={y_0} +datum={datum}".format(**proj_params)
    proj_in = pyproj.Proj("+init=EPSG:4326", preserve_units=True)
    proj_out = pyproj.Proj(proj4_str, preserve_units=True)
    project = partial(pyproj.transform, proj_in, proj_out)
    # transform geometry to map
    geometry = shapely.ops.transform(project, entity['geometry'])
    geometry = multi_to_poly(geometry, gdir=gdir)
    xx, yy = geometry.exterior.xy

    # Save transformed geometry to disk
    entity = entity.copy()
    entity['geometry'] = geometry
    # Avoid fiona bug: https://github.com/Toblerity/Fiona/issues/365
    for k, s in entity.iteritems():
        if type(s) in [np.int32, np.int64]:
            entity[k] = int(s)
    towrite = gpd.GeoDataFrame(entity).T
    towrite.crs = proj4_str
    # Delete the source before writing
    if 'DEM_SOURCE' in towrite:
        del towrite['DEM_SOURCE']

    # Define glacier area to use
    area = entity['Area']

    # Do we want to use the RGI area or ours?
    if not cfg.PARAMS['use_rgi_area']:
        area = geometry.area * 1e-6
        entity['Area'] = area
        towrite['Area'] = area

    # Write shapefile
    gdir.write_shapefile(towrite, 'outlines')

    # Also transform the intersects if necessary
    gdf = cfg.PARAMS['intersects_gdf']
    if len(gdf) > 0:
        gdf = gdf.loc[((gdf.RGIId_1 == gdir.rgi_id) |
                       (gdf.RGIId_2 == gdir.rgi_id))]
        if len(gdf) > 0:
            gdf = salem.transform_geopandas(gdf, to_crs=proj_out)
            if hasattr(gdf.crs, 'srs'):
                # salem uses pyproj
                gdf.crs = gdf.crs.srs
            gdir.write_shapefile(gdf, 'intersects')
    else:
        # Sanity check
        if cfg.PARAMS['use_intersects']:
            raise InvalidParamsError('You seem to have forgotten to set the '
                                     'intersects file for this run. OGGM '
                                     'works better with such a file. If you '
                                     'know what you are doing, set '
                                     "cfg.PARAMS['use_intersects'] = False to "
                                     "suppress this error.")

    # 6. choose a spatial resolution with respect to the glacier area
    dxmethod = cfg.PARAMS['grid_dx_method']
    if dxmethod == 'linear':
        dx = np.rint(cfg.PARAMS['d1'] * area + cfg.PARAMS['d2'])
    elif dxmethod == 'square':
        dx = np.rint(cfg.PARAMS['d1'] * np.sqrt(area) + cfg.PARAMS['d2'])
    elif dxmethod == 'fixed':
        dx = np.rint(cfg.PARAMS['fixed_dx'])
    else:
        raise InvalidParamsError('grid_dx_method not supported: {}'
                                 .format(dxmethod))
    # Additional trick for varying dx
    if dxmethod in ['linear', 'square']:
        dx = np.clip(dx, cfg.PARAMS['d2'], cfg.PARAMS['dmax'])

    log.debug('(%s) area %.2f km, dx=%.1f', gdir.rgi_id, area, dx)

    # Safety check
    border = cfg.PARAMS['border']
    if border > 1000:
        raise InvalidParamsError("You have set a cfg.PARAMS['border'] value "
                                 "of {}. ".format(cfg.PARAMS['border']) +
                                 'This a very large value, which is '
                                 'currently not supported in OGGM.')

    # For tidewater glaciers we force border to 10
    if gdir.is_tidewater and cfg.PARAMS['clip_tidewater_border']:
        border = 10

    # Corners, incl. a buffer of N pix
    ulx = np.min(xx) - border * dx
    lrx = np.max(xx) + border * dx
    uly = np.max(yy) + border * dx
    lry = np.min(yy) - border * dx
    # n pixels
    nx = int((lrx - ulx) / dx)
    ny = int((uly - lry) / dx)

    # Back to lon, lat for DEM download/preparation
    tmp_grid = salem.Grid(proj=proj_out, nxny=(nx, ny), x0y0=(ulx, uly),
                          dxdy=(dx, -dx), pixel_ref='corner')
    minlon, maxlon, minlat, maxlat = tmp_grid.extent_in_crs(crs=salem.wgs84)

    # Open DEM
    source = entity.DEM_SOURCE if hasattr(entity, 'DEM_SOURCE') else None
    dem_list, dem_source = get_topo_file((minlon, maxlon), (minlat, maxlat),
                                         rgi_region=gdir.rgi_region,
                                         rgi_subregion=gdir.rgi_subregion,
                                         source=source)
    log.debug('(%s) DEM source: %s', gdir.rgi_id, dem_source)
    log.debug('(%s) N DEM Files: %s', gdir.rgi_id, len(dem_list))

    # A glacier area can cover more than one tile:
    if len(dem_list) == 1:
        dem_dss = [rasterio.open(dem_list[0])]  # if one tile, just open it
        dem_data = rasterio.band(dem_dss[0], 1)
        if LooseVersion(rasterio.__version__) >= LooseVersion('1.0'):
            src_transform = dem_dss[0].transform
        else:
            src_transform = dem_dss[0].affine
    else:
        dem_dss = [rasterio.open(s) for s in dem_list]  # list of rasters
        dem_data, src_transform = merge_tool(dem_dss)  # merged rasters

    # Use Grid properties to create a transform (see rasterio cookbook)
    dst_transform = rasterio.transform.from_origin(
        ulx, uly, dx, dx  # sign change (2nd dx) is done by rasterio.transform
    )

    # Set up profile for writing output
    profile = dem_dss[0].profile
    profile.update({
        'crs': proj4_str,
        'transform': dst_transform,
        'width': nx,
        'height': ny
    })

    # Could be extended so that the cfg file takes all Resampling.* methods
    if cfg.PARAMS['topo_interp'] == 'bilinear':
        resampling = Resampling.bilinear
    elif cfg.PARAMS['topo_interp'] == 'cubic':
        resampling = Resampling.cubic
    else:
        raise InvalidParamsError('{} interpolation not understood'
                                 .format(cfg.PARAMS['topo_interp']))

    dem_reproj = gdir.get_filepath('dem')
    profile.pop('blockxsize', None)
    profile.pop('blockysize', None)
    profile.pop('compress', None)
    nodata = dem_dss[0].meta.get('nodata', None)
    if source == 'TANDEM' and nodata is None:
        # badly tagged geotiffs, let's do it ourselves
        nodata = -32767
    with rasterio.open(dem_reproj, 'w', **profile) as dest:
        dst_array = np.empty((ny, nx), dtype=dem_dss[0].dtypes[0])
        reproject(
            # Source parameters
            source=dem_data,
            src_crs=dem_dss[0].crs,
            src_transform=src_transform,
            src_nodata=nodata,
            # Destination parameters
            destination=dst_array,
            dst_transform=dst_transform,
            dst_crs=proj4_str,
            dst_nodata=nodata,
            # Configuration
            resampling=resampling)

        dest.write(dst_array, 1)

    for dem_ds in dem_dss:
        dem_ds.close()

    # Glacier grid
    x0y0 = (ulx+dx/2, uly-dx/2)  # To pixel center coordinates
    glacier_grid = salem.Grid(proj=proj_out, nxny=(nx, ny),  dxdy=(dx, -dx),
                              x0y0=x0y0)
    glacier_grid.to_json(gdir.get_filepath('glacier_grid'))

    # Write DEM source info
    gdir.add_to_diagnostics('dem_source', dem_source)
    source_txt = DEM_SOURCE_INFO.get(dem_source, dem_source)
    with open(gdir.get_filepath('dem_source'), 'w') as fw:
        fw.write(source_txt)
        fw.write('\n\n')
        fw.write('# Data files\n\n')
        for fname in dem_list:
            fw.write('{}\n'.format(os.path.basename(fname)))
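A note on the dst_transform construction used in both variants of this task: rasterio.transform.from_origin takes the upper-left corner and positive pixel sizes and inserts the negative y step itself, which is why dx is passed twice with the same sign. A quick check:

import rasterio.transform

t = rasterio.transform.from_origin(west=1000.0, north=5000.0,
                                   xsize=50.0, ysize=50.0)
print(t.a, t.e)    # 50.0 -50.0: x step positive, y step negative
print(t * (0, 0))  # (1000.0, 5000.0): the upper-left corner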
Example #12
def interpolation_masks(gdir):
    """Computes the glacier exterior masks taking ice divides into account.

    This is useful for distributed ice thickness. The masks are added to the
    gridded data file. For convenience we also add a slope mask.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    """

    # Variables
    grids_file = gdir.get_filepath('gridded_data')
    with ncDataset(grids_file) as nc:
        topo_smoothed = nc.variables['topo_smoothed'][:]
        glacier_mask = nc.variables['glacier_mask'][:]

    # Glacier exterior including nunataks
    erode = binary_erosion(glacier_mask)
    glacier_ext = glacier_mask ^ erode
    glacier_ext = np.where(glacier_mask == 1, glacier_ext, 0)

    # Intersects between glaciers
    gdfi = gpd.GeoDataFrame(columns=['geometry'])
    if gdir.has_file('intersects'):
        # read and transform to grid
        gdf = gdir.read_shapefile('intersects')
        salem.transform_geopandas(gdf, gdir.grid, inplace=True)
        gdfi = pd.concat([gdfi, gdf[['geometry']]])

    # Ice divide mask
    # Probably not the fastest way to do this, but it works
    dist = np.array([])
    jj, ii = np.where(glacier_ext)
    for j, i in zip(jj, ii):
        dist = np.append(dist, np.min(gdfi.distance(shpg.Point(i, j))))

    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", category=RuntimeWarning)
        pok = np.where(dist <= 1)
    glacier_ext_intersect = glacier_ext * 0
    glacier_ext_intersect[jj[pok], ii[pok]] = 1

    # Distance from border mask - Scipy does the job
    dx = gdir.grid.dx
    dis_from_border = 1 + glacier_ext_intersect - glacier_ext
    dis_from_border = distance_transform_edt(dis_from_border) * dx

    # Slope
    glen_n = cfg.PARAMS['glen_n']
    sy, sx = np.gradient(topo_smoothed, dx, dx)
    slope = np.arctan(np.sqrt(sy**2 + sx**2))
    slope = np.clip(slope, np.deg2rad(cfg.PARAMS['min_slope']*4), np.pi/2.)
    slope = 1 / slope**(glen_n / (glen_n+2))

    with ncDataset(grids_file, 'a') as nc:

        vn = 'glacier_ext_erosion'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'i1', ('y', 'x', ))
        v.units = '-'
        v.long_name = 'Glacier exterior with binary erosion method'
        v[:] = glacier_ext

        vn = 'ice_divides'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'i1', ('y', 'x', ))
        v.units = '-'
        v.long_name = 'Glacier ice divides'
        v[:] = glacier_ext_intersect

        vn = 'slope_factor'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ))
        v.units = '-'
        v.long_name = 'Slope factor as defined in Farinotti et al 2009'
        v[:] = slope

        vn = 'dis_from_border'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ))
        v.units = 'm'
        v.long_name = 'Distance from border'
        v[:] = dis_from_border
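The slope factor written to the file follows the Farinotti et al. (2009) scaling 1/alpha**(n/(n+2)), with alpha the slope angle in radians (clipped away from zero, as above) and n Glen's flow-law exponent. A quick numerical look, assuming the usual n = 3:

import numpy as np

glen_n = 3.0  # standard value of Glen's exponent
for deg in (2, 5, 10, 30):
    alpha = np.deg2rad(deg)
    print('%2d deg -> slope factor %.2f'
          % (deg, 1.0 / alpha ** (glen_n / (glen_n + 2))))
# Gentle slopes get large factors, i.e. thicker inverted ice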
Example #13
def trend_all():

    #mcs = cnst.GRIDSAT_PERU + 'aggs/gridsat_WA_-40_allClouds_monthly.nc'
    mcs = cnst.GRIDSAT_PERU + 'aggs/gridsat_WA_count_-50_allClouds_monthly.nc'
    chirps = '/media/ck/Elements/SouthAmerica/CHIRPS/chirps-v2.0.monthly.nc'
    enso = '/home/ck/DIR/mymachine/ENSO/ONI.csv'  #'/home/ck/DIR/mymachine/ENSO/meiv2.data'
    fpath = cnst.network_data + 'figs/HUARAZ/'

    fname = '/home/ck/DIR/cornkle/data/HUARAZ/shapes/riosan_sel_one.shp'
    isbuffer = [-79, -74, -12, -7]

    sdf = salem.read_shapefile(fname)
    sdf = salem.transform_geopandas(sdf, to_crs=salem.wgs84)

    da3 = xr.open_dataarray(mcs).sel(lon=slice(isbuffer[0], isbuffer[1]),
                                     lat=slice(isbuffer[2], isbuffer[3]))
    ca = xr.open_dataarray(chirps).sel(longitude=slice(isbuffer[0],
                                                       isbuffer[1]),
                                       latitude=slice(isbuffer[2],
                                                      isbuffer[3]))
    # The .salem.roi(shape=sdf) calls below mask out data outside the shape

    ens = pd.read_csv(enso,
                      sep=',',
                      engine='python',
                      names=np.arange(0, 13),
                      index_col=0)

    ca[0, :, :].salem.roi(shape=sdf).plot.pcolormesh()

    da3 = da3.salem.roi(shape=sdf).mean(['lat', 'lon']) * 100
    ca = ca.salem.roi(shape=sdf).mean(['latitude', 'longitude'])
    months = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]

    dicm = {}
    dicmean = {}

    f = plt.figure(figsize=(15, 7.5), dpi=300)

    for ids, m in enumerate(months):
        method = 'mk'

        if type(m) == int:
            m = [m]

        ensmonth = ens[m[0]]

        eens = ensmonth.loc['1985':'2019']

        sig = True

        da = da3[(da3['time.month'] == m[0]) & (da3['time.year'] >= 1985) &
                 (da3['time.year'] <= 2019)]

        ch = ca[(ca['time.month'] == m[0]) & (ca['time.year'] >= 1985) &
                (ca['time.year'] <= 2019)]

        da = da - np.mean(da.values)
        ch = ch - np.mean(ch.values)

        sslope, sint, srval, spval, serr = stats.linregress(
            np.arange(len(da.values)), da.values)
        print('linear regression for cloud cover', m, sslope, srval, spval)

        cslope, cint, crval, cpval, cerr = stats.linregress(
            np.arange(len(ch.values)), ch.values)
        print('linear regression for rainfall', m, cslope, crval, cpval)

        dslope = sslope * 10
        cdslope = cslope * 10

        if len(m) == 1:
            fp = fpath + 'MCS_only_trendmap_Allmonths_count-50C_lines_Huaraz_1985-2018_' + str(
                m[0]).zfill(2) + '.png'
        else:
            fp = fpath + 'MCS_only_trendmap_1985-2018_' + str(
                m[0]).zfill(2) + '-' + str(m[1]).zfill(2) + '.png'

        ax = f.add_subplot(3, 4, ids + 1)

        x = np.arange(0, len(ch['time.year']))

        ax.plot(ch['time.year'],
                ch,
                marker='o',
                markersize=3,
                label='Trend: ' + str(np.round(cdslope, 2)) +
                'mm / month decade | ' + 'p=' + str(np.round(cpval, 2)),
                color='blue')
        ax.plot(ch['time.year'],
                cint + cslope * np.arange(0, len(ch['time.year'])),
                linestyle='dashed',
                color='blue')
        cc = []
        for enb in eens.values:
            if enb < 0:
                cc.append('lightblue')
            else:
                cc.append('red')

        ax.bar(eens.index.values, eens.values * 30, color=cc)
        plt.title('Month: ' + str(m[0]))
        ax.set_ylabel('mm month$^{-1}$ (blue)')
        ax.set_ylim(-90, 90)
        plt.legend(fontsize=6)
        ax1 = ax.twinx()
        ax1.plot(da['time.year'], da, color='orange', marker='o', markersize=3)
        ax1.plot(da['time.year'],
                 sint + sslope * np.arange(0, len(da['time.year'])),
                 color='orange',
                 linestyle='dashed')
        ax1.set_ylabel('% cloud cover/month (orange)')
        ax1.set_ylim(-20, 20)

    plt.tight_layout()
    plt.savefig(fp)
    plt.close('all')
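The trend estimate in this plotting routine boils down to stats.linregress against a yearly index, with the slope multiplied by 10 so it reads as a change per decade. A self-contained sketch on synthetic data:

import numpy as np
from scipy import stats

years = np.arange(1985, 2020)
rng = np.random.default_rng(0)
# Synthetic monthly rainfall anomaly with a weak positive trend
y = 0.3 * (years - years[0]) + rng.normal(0, 5, years.size)

slope, intercept, rval, pval, err = stats.linregress(
    np.arange(years.size), y)
print('trend: %.2f mm/month per decade (p=%.3f)' % (slope * 10, pval))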
Example #14
def catchment_width_geom(gdir, div_id=None):
    """Compute geometrical catchment widths for each point of the flowlines.

    Updates the 'inversion_flowlines' save file.

    Parameters
    ----------
    gdir : oggm.GlacierDirectory
    """

    # variables
    flowlines = gdir.read_pickle('inversion_flowlines', div_id=div_id)
    catchment_indices = gdir.read_pickle('catchment_indices', div_id=div_id)
    xmesh, ymesh = np.meshgrid(np.arange(0, gdir.grid.nx, 1),
                               np.arange(0, gdir.grid.ny, 1))

    # Topography is to filter the unrealistic lines afterwards.
    # I take the non-smoothed topography
    # I remove the boundary pixs because they are likely to be higher
    fpath = gdir.get_filepath('gridded_data', div_id=div_id)
    with netCDF4.Dataset(fpath) as nc:
        topo = nc.variables['topo'][:]
        mask_ext = nc.variables['glacier_ext'][:]
        mask_glacier = nc.variables['glacier_mask'][:]
    topo[np.where(mask_glacier == 0)] = np.NaN
    topo[np.where(mask_ext == 1)] = np.NaN

    # Intersects between catchments/glaciers
    gdfi = gpd.GeoDataFrame(columns=['geometry'])
    if gdir.has_file('catchments_intersects', div_id=div_id):
        # read and transform to grid
        gdf = gpd.read_file(gdir.get_filepath('catchments_intersects',
                                              div_id=div_id))
        salem.transform_geopandas(gdf, gdir.grid, inplace=True)
        gdfi = pd.concat([gdfi, gdf[['geometry']]])
    if gdir.has_file('intersects', div_id=0):
        # read and transform to grid
        gdf = gpd.read_file(gdir.get_filepath('intersects', div_id=0))
        salem.transform_geopandas(gdf, gdir.grid, inplace=True)
        gdfi = pd.concat([gdfi, gdf[['geometry']]])

    # apply a buffer to be sure we get the intersects right. Be generous
    gdfi = gdfi.buffer(1.5)

    # Filter parameters
    # Number of pixels to arbitrarily remove at junctions
    jpix = int(cfg.PARAMS['flowline_junction_pix'])

    # Loop over the lines
    mask = np.zeros((gdir.grid.ny, gdir.grid.nx))
    for fl, ci in zip(flowlines, catchment_indices):

        n = len(fl.dis_on_line)

        widths = np.zeros(n)
        wlines = []

        # Catchment polygon
        mask[:] = 0
        mask[tuple(ci.T)] = 1
        poly, poly_no = _mask_to_polygon(mask, x=xmesh, y=ymesh, gdir=gdir)

        # First guess widths
        for i, (normal, pcoord) in enumerate(zip(fl.normals, fl.line.coords)):
            width, wline = _point_width(normal, pcoord, fl, poly, poly_no)
            widths[i] = width
            wlines.append(wline)

        valid = np.where(np.isfinite(widths))
        if len(valid[0]) == 0:
            errmsg = '{}: first guess widths went wrong.'.format(gdir.rgi_id)
            raise RuntimeError(errmsg)

        # Ok now the entire centerline is computed.
        # I take all these widths for geometrically valid, and see if they
        # intersect with our buffered catchment/glacier intersections
        touches_border = []
        for wg in wlines:
            touches_border.append(np.any(gdfi.intersects(wg)))
        touches_border = _filter_grouplen(touches_border, minsize=5)

        # we filter the lines which have a large altitude range
        fil_widths = _filter_for_altitude_range(widths, wlines, topo)

        # Filter +- widths at junction points
        for fid in fl.inflow_indices:
            i0 = np.clip(fid-jpix, jpix/2, n-jpix/2).astype(np.int64)
            i1 = np.clip(fid+jpix+1, jpix/2, n-jpix/2).astype(np.int64)
            fil_widths[i0:i1] = np.NaN

        valid = np.where(np.isfinite(fil_widths))
        if len(valid[0]) == 0:
            # This happens very rarely. Just pick the middle and
            # the correction task should do the rest
            log.warning('{}: width filtering too strong.'.format(gdir.rgi_id))
            # fill all points with the middle width (keeps length n)
            fil_widths = widths * 0 + widths[int(len(widths) / 2)]

        # Write it in the objects attributes
        assert len(fil_widths) == n
        fl.widths = fil_widths
        fl.geometrical_widths = wlines
        fl.touches_border = touches_border

    # Overwrite pickle
    gdir.write_pickle(flowlines, 'inversion_flowlines', div_id=div_id)
Example #15
    def _reproject_and_write_shapefile(self, entity, filesuffix=''):

        # Make a local glacier map
        params = dict(name='tmerc',
                      lat_0=0.,
                      lon_0=self.cenlon,
                      k=0.9996,
                      x_0=0,
                      y_0=0,
                      datum='WGS84')
        proj4_str = "+proj={name} +lat_0={lat_0} +lon_0={lon_0} +k={k} " \
                    "+x_0={x_0} +y_0={y_0} +datum={datum}".format(**params)

        # Reproject
        proj_in = pyproj.Proj("epsg:4326", preserve_units=True)
        proj_out = pyproj.Proj(proj4_str, preserve_units=True)

        # transform geometry to map
        project = partial(transform_proj, proj_in, proj_out)
        geometry = shp_trafo(project, entity['geometry'])
        geometry = multipolygon_to_polygon(geometry, gdir=self)

        # Save transformed geometry to disk
        entity = entity.copy()
        entity['geometry'] = geometry

        # Do we want to use the RGI area or ours?
        if not cfg.PARAMS['use_rgi_area']:
            # Update Area
            area = geometry.area * 1e-6
            entity['Area'] = area

        # Avoid fiona bug: https://github.com/Toblerity/Fiona/issues/365
        for k, s in entity.iteritems():
            if type(s) in [np.int32, np.int64]:
                entity[k] = int(s)
        towrite = gpd.GeoDataFrame(entity).T
        towrite.crs = proj4_str

        # Write shapefile
        self.write_shapefile(towrite, 'outlines', filesuffix=filesuffix)

        # Also transform the intersects if necessary
        gdf = cfg.PARAMS['intersects_gdf']
        if len(gdf) > 0:
            gdf = gdf.loc[((gdf.RGIId_1 == self.rgi_id) |
                           (gdf.RGIId_2 == self.rgi_id))]
            if len(gdf) > 0:
                gdf = salem.transform_geopandas(gdf, to_crs=proj_out)
                if hasattr(gdf.crs, 'srs'):
                    # salem uses pyproj
                    gdf.crs = gdf.crs.srs
                self.write_shapefile(gdf, 'intersects')
        else:
            # Sanity check
            if cfg.PARAMS['use_intersects']:
                raise InvalidParamsError(
                    'You seem to have forgotten to set the '
                    'intersects file for this run. OGGM '
                    'works better with such a file. If you '
                    'know what you are doing, set '
                    "cfg.PARAMS['use_intersects'] = False to "
                    "suppress this error.")
Example #16
def define_glacier_region(gdir, entity=None):
    """
    Very first task: define the glacier's local grid.

    Defines the local projection (Transverse Mercator), centered on the
    glacier. There are several options to set the resolution of the local grid.
    It can be adapted depending on the size of the glacier with::

        dx (m) = d1 * AREA (km) + d2 ; clipped to dmax

    or be set to a fixed value. See ``params.cfg`` for setting these options.
    Default values of the adapted mode lead to a resolution of 50 m for
    Hintereisferner, which is approx. 8 km2 in area.
    After defining the grid, the topography and the outlines of the glacier
    are transformed into the local projection. The default interpolation for
    the topography is `cubic`.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    entity : geopandas GeoSeries
        the glacier geometry to process
    """

    # choose a spatial resolution with respect to the glacier area
    dxmethod = cfg.PARAMS['grid_dx_method']
    area = gdir.rgi_area_km2
    if dxmethod == 'linear':
        dx = np.rint(cfg.PARAMS['d1'] * area + cfg.PARAMS['d2'])
    elif dxmethod == 'square':
        dx = np.rint(cfg.PARAMS['d1'] * np.sqrt(area) + cfg.PARAMS['d2'])
    elif dxmethod == 'fixed':
        dx = np.rint(cfg.PARAMS['fixed_dx'])
    else:
        raise ValueError('grid_dx_method not supported: {}'.format(dxmethod))
    # Additional trick for varying dx
    if dxmethod in ['linear', 'square']:
        dx = np.clip(dx, cfg.PARAMS['d2'], cfg.PARAMS['dmax'])

    log.debug('%s: area %.2f km, dx=%.1f', gdir.rgi_id, area, dx)

    # Make a local glacier map
    proj_params = dict(name='tmerc', lat_0=0., lon_0=gdir.cenlon,
                       k=0.9996, x_0=0, y_0=0, datum='WGS84')
    proj4_str = "+proj={name} +lat_0={lat_0} +lon_0={lon_0} +k={k} " \
                "+x_0={x_0} +y_0={y_0} +datum={datum}".format(**proj_params)
    proj_in = pyproj.Proj("+init=EPSG:4326", preserve_units=True)
    proj_out = pyproj.Proj(proj4_str, preserve_units=True)
    project = partial(pyproj.transform, proj_in, proj_out)
    # transform geometry to map
    geometry = shapely.ops.transform(project, entity['geometry'])
    geometry = _check_geometry(geometry)
    xx, yy = geometry.exterior.xy

    # Corners, incl. a buffer of N pix
    ulx = np.min(xx) - cfg.PARAMS['border'] * dx
    lrx = np.max(xx) + cfg.PARAMS['border'] * dx
    uly = np.max(yy) + cfg.PARAMS['border'] * dx
    lry = np.min(yy) - cfg.PARAMS['border'] * dx
    # n pixels
    nx = int((lrx - ulx) / dx)
    ny = int((uly - lry) / dx)

    # Back to lon, lat for DEM download/preparation
    tmp_grid = salem.Grid(proj=proj_out, nxny=(nx, ny), x0y0=(ulx, uly),
                          dxdy=(dx, -dx), pixel_ref='corner')
    minlon, maxlon, minlat, maxlat = tmp_grid.extent_in_crs(crs=salem.wgs84)

    # save transformed geometry to disk
    entity = entity.copy()
    entity['geometry'] = geometry
    # Avoid fiona bug: https://github.com/Toblerity/Fiona/issues/365
    for k, s in entity.iteritems():
        if type(s) in [np.int32, np.int64]:
            entity[k] = int(s)
    towrite = gpd.GeoDataFrame(entity).T
    towrite.crs = proj4_str
    # Delete the source before writing
    if 'DEM_SOURCE' in towrite:
        del towrite['DEM_SOURCE']
    towrite.to_file(gdir.get_filepath('outlines'))

    # Also transform the intersects if necessary
    gdf = cfg.PARAMS['intersects_gdf']
    gdf = gdf.loc[(gdf.RGIId_1 == gdir.rgi_id) | (gdf.RGIId_2 == gdir.rgi_id)]
    if len(gdf) > 0:
        gdf = salem.transform_geopandas(gdf, to_crs=proj_out)
        if hasattr(gdf.crs, 'srs'):
            # salem uses pyproj
            gdf.crs = gdf.crs.srs
        gdf.to_file(gdir.get_filepath('intersects'))

    # Open DEM
    source = entity.DEM_SOURCE if hasattr(entity, 'DEM_SOURCE') else None
    dem_list, dem_source = get_topo_file((minlon, maxlon), (minlat, maxlat),
                                         rgi_region=gdir.rgi_region,
                                         source=source)
    log.debug('%s: DEM source: %s', gdir.rgi_id, dem_source)

    # A glacier area can cover more than one tile:
    if len(dem_list) == 1:
        dem_dss = [rasterio.open(dem_list[0])]  # if one tile, just open it
        dem_data = rasterio.band(dem_dss[0], 1)
        if LooseVersion(rasterio.__version__) >= LooseVersion('1.0'):
            src_transform = dem_dss[0].transform
        else:
            src_transform = dem_dss[0].affine
    else:
        dem_dss = [rasterio.open(s) for s in dem_list]  # list of rasters
        dem_data, src_transform = merge_tool(dem_dss)  # merged rasters

    # Use Grid properties to create a transform (see rasterio cookbook)
    dst_transform = rasterio.transform.from_origin(
        ulx, uly, dx, dx  # sign change (2nd dx) is done by rasterio.transform
    )

    # Set up profile for writing output
    profile = dem_dss[0].profile
    profile.update({
        'crs': proj4_str,
        'transform': dst_transform,
        'width': nx,
        'height': ny
    })

    # Could be extended so that the cfg file takes all Resampling.* methods
    if cfg.PARAMS['topo_interp'] == 'bilinear':
        resampling = Resampling.bilinear
    elif cfg.PARAMS['topo_interp'] == 'cubic':
        resampling = Resampling.cubic
    else:
        raise ValueError('{} interpolation not understood'
                         .format(cfg.PARAMS['topo_interp']))

    dem_reproj = gdir.get_filepath('dem')
    with rasterio.open(dem_reproj, 'w', **profile) as dest:
        dst_array = np.empty((ny, nx), dtype=dem_dss[0].dtypes[0])
        reproject(
            # Source parameters
            source=dem_data,
            src_crs=dem_dss[0].crs,
            src_transform=src_transform,
            # Destination parameters
            destination=dst_array,
            dst_transform=dst_transform,
            dst_crs=proj4_str,
            # Configuration
            resampling=resampling)

        dest.write(dst_array, 1)

    for dem_ds in dem_dss:
        dem_ds.close()

    # Glacier grid
    x0y0 = (ulx+dx/2, uly-dx/2)  # To pixel center coordinates
    glacier_grid = salem.Grid(proj=proj_out, nxny=(nx, ny),  dxdy=(dx, -dx),
                              x0y0=x0y0)
    glacier_grid.to_json(gdir.get_filepath('glacier_grid'))
    gdir.write_pickle(dem_source, 'dem_source')

    # Looks in the database if the glacier has divides.
    gdf = cfg.PARAMS['divides_gdf']
    if gdir.rgi_id in gdf.index.values:

        div_gdf = gdf.loc[gdir.rgi_id]

        # Compute the intersections between them for later bedshapes
        gdf_inter = polygon_intersections(div_gdf)
        if len(gdf_inter) > 0:
            if hasattr(div_gdf.crs, 'srs'):
                # salem uses pyproj
                gdf_inter.crs = div_gdf.crs.srs
            else:
                gdf_inter.crs = div_gdf.crs

            gdf_inter.to_file(gdir.get_filepath('divides_intersects'))

        # Ok go on
        divlist = [g for g in div_gdf.geometry]

        # Reproject the shape
        def proj(lon, lat):
            return salem.transform_proj(salem.wgs84, gdir.grid.proj,
                                        lon, lat)
        divlist = [shapely.ops.transform(proj, g) for g in divlist]

        # Keep only the ones large enough
        log.debug('%s: divide candidates: %d', gdir.rgi_id, len(divlist))
        divlist = [g for g in divlist if (g.area >= (50 * dx ** 2))]
        log.debug('%s: number of divides: %d', gdir.rgi_id, len(divlist))
        divlist = np.asarray(divlist)

        # Sort them by area
        divlist = divlist[np.argsort([g.area for g in divlist])[::-1]]

        # Write the directories and the files
        for i, g in enumerate(divlist):
            _dir = os.path.join(gdir.dir, 'divide_{0:0=2d}'.format(i + 1))
            if not os.path.exists(_dir):
                os.makedirs(_dir)
            # File
            entity['geometry'] = g
            towrite = gpd.GeoDataFrame(entity).T
            towrite.crs = proj4_str
            towrite.to_file(os.path.join(_dir, cfg.BASENAMES['outlines']))
    else:
        # Make a single directory and link the files
        log.debug('%s: number of divides: %d', gdir.rgi_id, 1)
        _dir = os.path.join(gdir.dir, 'divide_01')
        if not os.path.exists(_dir):
            os.makedirs(_dir)
        linkname = os.path.join(_dir, cfg.BASENAMES['outlines'])
        sourcename = gdir.get_filepath('outlines')
        for ending in ['.cpg', '.dbf', '.shp', '.shx', '.prj']:
            _s = sourcename.replace('.shp', ending)
            _l = linkname.replace('.shp', ending)
            if os.path.exists(_s):
                try:
                    # we are on UNIX
                    os.link(_s, _l)
                except AttributeError:
                    # we are on Windows
                    copyfile(_s, _l)
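
# The block above fans the outline shapefile out into each divide directory
# via hard links, with a plain copy as fallback. A minimal standalone sketch
# of that pattern (link_or_copy and its paths are hypothetical, not OGGM API):
import os
from shutil import copyfile

def link_or_copy(src, dst):
    """Hard-link src to dst; fall back to copying where links fail."""
    try:
        os.link(src, dst)  # cheap and instant on most UNIX filesystems
    except (AttributeError, OSError):
        # os.link unavailable (old platforms) or cross-device/Windows failure
        copyfile(src, dst)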
Example #17
def define_glacier_region(gdir, entity=None):
    """Very first task: define the glacier's local grid.

    Defines the local projection (Transverse Mercator), centered on the
    glacier. There are several options to set the resolution of the local
    grid: it can be adapted to the size of the glacier with::

        dx (m) = d1 * AREA (km2) + d2 ; clipped to dmax

    or be set to a fixed value. See ``params.cfg`` for setting these options.
    The default values of the adaptive mode lead to a resolution of 50 m for
    Hintereisferner, which covers approx. 8 km2.
    After defining the grid, the topography and the outlines of the glacier
    are transformed into the local projection. The default interpolation for
    the topography is `cubic`.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    entity : geopandas.GeoSeries
        the glacier geometry to process
    """

    # Make a local glacier map
    proj_params = dict(name='tmerc',
                       lat_0=0.,
                       lon_0=gdir.cenlon,
                       k=0.9996,
                       x_0=0,
                       y_0=0,
                       datum='WGS84')
    proj4_str = "+proj={name} +lat_0={lat_0} +lon_0={lon_0} +k={k} " \
                "+x_0={x_0} +y_0={y_0} +datum={datum}".format(**proj_params)
    proj_in = pyproj.Proj("+init=EPSG:4326", preserve_units=True)
    proj_out = pyproj.Proj(proj4_str, preserve_units=True)
    project = partial(pyproj.transform, proj_in, proj_out)
    # transform geometry to map
    geometry = shapely.ops.transform(project, entity['geometry'])
    geometry = multi_to_poly(geometry, gdir=gdir)
    xx, yy = geometry.exterior.xy

    # Save transformed geometry to disk
    entity = entity.copy()
    entity['geometry'] = geometry
    # Avoid fiona bug: https://github.com/Toblerity/Fiona/issues/365
    for k, s in entity.items():  # Series.iteritems() was removed in pandas 2
        if type(s) in [np.int32, np.int64]:
            entity[k] = int(s)
    towrite = gpd.GeoDataFrame(entity).T
    towrite.crs = proj4_str
    # Delete the source before writing
    if 'DEM_SOURCE' in towrite:
        del towrite['DEM_SOURCE']

    # Define glacier area to use
    area = entity['Area']

    # Do we want to use the RGI area or ours?
    if not cfg.PARAMS['use_rgi_area']:
        area = geometry.area * 1e-6
        entity['Area'] = area
        towrite['Area'] = area

    # Write shapefile
    gdir.write_shapefile(towrite, 'outlines')

    # Also transform the intersects if necessary
    gdf = cfg.PARAMS['intersects_gdf']
    if len(gdf) > 0:
        gdf = gdf.loc[((gdf.RGIId_1 == gdir.rgi_id) |
                       (gdf.RGIId_2 == gdir.rgi_id))]
        if len(gdf) > 0:
            gdf = salem.transform_geopandas(gdf, to_crs=proj_out)
            if hasattr(gdf.crs, 'srs'):
                # salem uses pyproj
                gdf.crs = gdf.crs.srs
            gdir.write_shapefile(gdf, 'intersects')
    else:
        # Sanity check
        if cfg.PARAMS['use_intersects']:
            raise InvalidParamsError('You seem to have forgotten to set the '
                                     'intersects file for this run. OGGM '
                                     'works better with such a file. If you '
                                     'know what you are doing, set '
                                     "cfg.PARAMS['use_intersects'] = False to "
                                     "suppress this error.")

    # Choose a spatial resolution with respect to the glacier area
    dxmethod = cfg.PARAMS['grid_dx_method']
    if dxmethod == 'linear':
        dx = np.rint(cfg.PARAMS['d1'] * area + cfg.PARAMS['d2'])
    elif dxmethod == 'square':
        dx = np.rint(cfg.PARAMS['d1'] * np.sqrt(area) + cfg.PARAMS['d2'])
    elif dxmethod == 'fixed':
        dx = np.rint(cfg.PARAMS['fixed_dx'])
    else:
        raise InvalidParamsError(
            'grid_dx_method not supported: {}'.format(dxmethod))
    # Additional trick for varying dx
    if dxmethod in ['linear', 'square']:
        dx = np.clip(dx, cfg.PARAMS['d2'], cfg.PARAMS['dmax'])

    log.debug('(%s) area %.2f km2, dx=%.1f', gdir.rgi_id, area, dx)

    # Safety check
    border = cfg.PARAMS['border']
    if border > 1000:
        raise InvalidParamsError("You have set a cfg.PARAMS['border'] value "
                                 "of {}. ".format(cfg.PARAMS['border']) +
                                 'This a very large value, which is '
                                 'currently not supported in OGGM.')

    # For tidewater glaciers we force border to 10
    if gdir.is_tidewater and cfg.PARAMS['clip_tidewater_border']:
        border = 10

    # Corners, incl. a buffer of N pix
    ulx = np.min(xx) - border * dx
    lrx = np.max(xx) + border * dx
    uly = np.max(yy) + border * dx
    lry = np.min(yy) - border * dx
    # n pixels
    nx = int((lrx - ulx) / dx)  # builtin int: np.int was removed from numpy
    ny = int((uly - lry) / dx)

    # Back to lon, lat for DEM download/preparation
    tmp_grid = salem.Grid(proj=proj_out,
                          nxny=(nx, ny),
                          x0y0=(ulx, uly),
                          dxdy=(dx, -dx),
                          pixel_ref='corner')
    minlon, maxlon, minlat, maxlat = tmp_grid.extent_in_crs(crs=salem.wgs84)

    # Open DEM
    source = entity.DEM_SOURCE if hasattr(entity, 'DEM_SOURCE') else None
    dem_list, dem_source = get_topo_file((minlon, maxlon), (minlat, maxlat),
                                         rgi_region=gdir.rgi_region,
                                         rgi_subregion=gdir.rgi_subregion,
                                         source=source)
    log.debug('(%s) DEM source: %s', gdir.rgi_id, dem_source)
    log.debug('(%s) N DEM Files: %s', gdir.rgi_id, len(dem_list))

    # A glacier area can cover more than one tile:
    if len(dem_list) == 1:
        dem_dss = [rasterio.open(dem_list[0])]  # if one tile, just open it
        dem_data = rasterio.band(dem_dss[0], 1)
        if LooseVersion(rasterio.__version__) >= LooseVersion('1.0'):
            src_transform = dem_dss[0].transform
        else:
            src_transform = dem_dss[0].affine
    else:
        dem_dss = [rasterio.open(s) for s in dem_list]  # list of rasters
        dem_data, src_transform = merge_tool(dem_dss)  # merged rasters

    # Use Grid properties to create a transform (see rasterio cookbook)
    dst_transform = rasterio.transform.from_origin(
        ulx,
        uly,
        dx,
        dx  # sign change (2nd dx) is done by rasterio.transform
    )

    # Set up profile for writing output
    profile = dem_dss[0].profile
    profile.update({
        'crs': proj4_str,
        'transform': dst_transform,
        'width': nx,
        'height': ny
    })

    # Could be extended so that the cfg file takes all Resampling.* methods
    if cfg.PARAMS['topo_interp'] == 'bilinear':
        resampling = Resampling.bilinear
    elif cfg.PARAMS['topo_interp'] == 'cubic':
        resampling = Resampling.cubic
    else:
        raise InvalidParamsError('{} interpolation not understood'.format(
            cfg.PARAMS['topo_interp']))

    dem_reproj = gdir.get_filepath('dem')
    profile.pop('blockxsize', None)
    profile.pop('blockysize', None)
    profile.pop('compress', None)
    nodata = dem_dss[0].meta.get('nodata', None)
    if source == 'TANDEM' and nodata is None:
        # badly tagged geotiffs, let's do it ourselves
        nodata = -32767
    with rasterio.open(dem_reproj, 'w', **profile) as dest:
        dst_array = np.empty((ny, nx), dtype=dem_dss[0].dtypes[0])
        reproject(
            # Source parameters
            source=dem_data,
            src_crs=dem_dss[0].crs,
            src_transform=src_transform,
            src_nodata=nodata,
            # Destination parameters
            destination=dst_array,
            dst_transform=dst_transform,
            dst_crs=proj4_str,
            dst_nodata=nodata,
            # Configuration
            resampling=resampling)

        dest.write(dst_array, 1)

    for dem_ds in dem_dss:
        dem_ds.close()

    # Glacier grid
    x0y0 = (ulx + dx / 2, uly - dx / 2)  # To pixel center coordinates
    glacier_grid = salem.Grid(proj=proj_out,
                              nxny=(nx, ny),
                              dxdy=(dx, -dx),
                              x0y0=x0y0)
    glacier_grid.to_json(gdir.get_filepath('glacier_grid'))

    # Write DEM source info
    gdir.add_to_diagnostics('dem_source', dem_source)
    source_txt = DEM_SOURCE_INFO.get(dem_source, dem_source)
    with open(gdir.get_filepath('dem_source'), 'w') as fw:
        fw.write(source_txt)
        fw.write('\n\n')
        fw.write('# Data files\n\n')
        for fname in dem_list:
            fw.write('{}\n'.format(os.path.basename(fname)))
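
# A worked sketch of the adaptive resolution rule from the docstring above,
# dx = d1 * f(area) + d2, clipped to [d2, dmax]. The parameter values below
# are illustrative assumptions, not necessarily the cfg.PARAMS defaults:
import numpy as np

def adaptive_dx(area_km2, method='square', d1=14., d2=10., dmax=100.):
    """Return a local grid resolution (m) for a glacier of a given area."""
    if method == 'linear':
        dx = np.rint(d1 * area_km2 + d2)
    elif method == 'square':
        dx = np.rint(d1 * np.sqrt(area_km2) + d2)
    else:
        raise ValueError('method not supported: {}'.format(method))
    return float(np.clip(dx, d2, dmax))

# For a ~8 km2 glacier such as Hintereisferner this gives:
# adaptive_dx(8.)  # -> 50.0 m with the 'square' method and values above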
Example #18
def interpolation_masks(gdir):
    """Computes the glacier exterior masks taking ice divides into account.

    This is useful for distributed ice thickness. The masks are added to the
    gridded data file. For convenience we also add a slope mask.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data
    """

    # Variables
    grids_file = gdir.get_filepath('gridded_data')
    with ncDataset(grids_file) as nc:
        topo_smoothed = nc.variables['topo_smoothed'][:]
        glacier_mask = nc.variables['glacier_mask'][:]

    # Glacier exterior including nunataks
    erode = binary_erosion(glacier_mask)
    glacier_ext = glacier_mask ^ erode
    glacier_ext = np.where(glacier_mask == 1, glacier_ext, 0)

    # Intersects between glaciers
    gdfi = gpd.GeoDataFrame(columns=['geometry'])
    if gdir.has_file('intersects'):
        # read and transform to grid
        gdf = gdir.read_shapefile('intersects')
        salem.transform_geopandas(gdf, gdir.grid, inplace=True)
        gdfi = pd.concat([gdfi, gdf[['geometry']]])

    # Ice divide mask
    # Probably not the fastest way to do this, but it works
    dist = np.array([])
    jj, ii = np.where(glacier_ext)
    for j, i in zip(jj, ii):
        dist = np.append(dist, np.min(gdfi.distance(shpg.Point(i, j))))

    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", category=RuntimeWarning)
        pok = np.where(dist <= 1)
    glacier_ext_intersect = glacier_ext * 0
    glacier_ext_intersect[jj[pok], ii[pok]] = 1

    # Distance from border mask - Scipy does the job
    dx = gdir.grid.dx
    dis_from_border = 1 + glacier_ext_intersect - glacier_ext
    dis_from_border = distance_transform_edt(dis_from_border) * dx

    # Slope
    glen_n = cfg.PARAMS['glen_n']
    sy, sx = np.gradient(topo_smoothed, dx, dx)
    slope = np.arctan(np.sqrt(sy**2 + sx**2))
    slope = np.clip(slope, np.deg2rad(cfg.PARAMS['min_slope'] * 4), np.pi / 2.)
    slope = 1 / slope**(glen_n / (glen_n + 2))

    with ncDataset(grids_file, 'a') as nc:

        vn = 'glacier_ext_erosion'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'i1', ('y', 'x'))
        v.units = '-'
        v.long_name = 'Glacier exterior with binary erosion method'
        v[:] = glacier_ext

        vn = 'ice_divides'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'i1', ('y', 'x'))
        v.units = '-'
        v.long_name = 'Glacier ice divides'
        v[:] = glacier_ext_intersect

        vn = 'slope_factor'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x'))
        v.units = '-'
        v.long_name = 'Slope factor as defined in Farinotti et al 2009'
        v[:] = slope

        vn = 'dis_from_border'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x'))
        v.units = 'm'
        v.long_name = 'Distance from border'
        v[:] = dis_from_border
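
# A toy sketch of the two gridded fields computed above: the Euclidean
# distance to the glacier border and the Farinotti et al. (2009) slope
# factor slope**(-n/(n+2)). The arrays and values are made-up assumptions,
# and the border handling is simplified (ice divides are ignored here):
import numpy as np
from scipy.ndimage import distance_transform_edt

glacier_mask = np.zeros((6, 6), dtype=int)
glacier_mask[1:5, 1:5] = 1  # a toy 4x4 glacier patch

# distance_transform_edt measures the distance to the nearest zero cell,
# so the non-glacier cells act as the border; dx scales pixels to metres
dx = 50.0
dis_from_border = distance_transform_edt(glacier_mask) * dx

glen_n = 3.0
slope = np.deg2rad(20.0)  # one illustrative surface slope (radians)
slope_factor = slope ** (-glen_n / (glen_n + 2))  # == 1 / slope**(n/(n+2))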
Example #19
from collections import OrderedDict
import salem
import xarray as xr  # used below for open_dataset; missing from the original snippet
from utils import u_met, u_parallelise, u_arrays as ua, constants as cnst, u_darrays
from scipy.interpolate import griddata
import multiprocessing
import os
import glob

import pickle as pkl
from wavelet import util as wutil

bigbox = [-79, -65, -17, -3]  # [lonmin, lonmax, latmin, latmax]

fname = '/home/ck/DIR/cornkle/data/HUARAZ/shapes/riosan_sel_one.shp'
sdf = salem.read_shapefile(fname)
sdf = salem.transform_geopandas(sdf, to_crs=salem.wgs84)

file = cnst.GRIDSAT_PERU + 'daily_5000km2_LT_afternoon/gridsat_WA_-40Min_5000km2_13-19UTCperDay_'
file1 = cnst.GRIDSAT_PERU + 'daily_5000km2_UTC_afternoon/gridsat_WA_-40Min_5000km2_UTCDay_'
file2 = cnst.GRIDSAT_PERU + 'daily_ALLkm2_UTC_DAY/gridsat_WA_-40Min_ALLkm2_UTCDay_'

gbox = bigbox
isbuffer = gbox

#topo = xr.open_dataarray(cnst.TOPO_1MIN)
#topo = topo.sel(lon=slice(isbuffer[0], isbuffer[1]), lat=slice(isbuffer[2], isbuffer[3]))

chirps = xr.open_dataset(
    cnst.elements_drive +
    'SouthAmerica/CHIRPS/chirps-v2.0.daily.peru.nc').chunk({'time': 365})
chirps = chirps['precip'].sel(longitude=slice(isbuffer[0], isbuffer[1]),