Example #1
def nlcd(
    geometry: Union[Polygon, MultiPolygon, Tuple[float, float, float, float]],
    resolution: float,
    years: Optional[Dict[str, Optional[int]]] = None,
    geo_crs: str = DEF_CRS,
    crs: str = DEF_CRS,
) -> xr.Dataset:
    """Get data from NLCD database (2016).

    Download land use/land cover data from NLCD (2016) database within
    a given geometry in epsg:4326.

    Parameters
    ----------
    geometry : Polygon, MultiPolygon, or tuple of length 4
        The geometry or bounding box (west, south, east, north) for extracting the data.
    resolution : float
        The data resolution in meters. The width and height of the output are computed in pixel
        based on the geometry bounds and the given resolution.
    years : dict, optional
        The years for NLCD data as a dictionary, defaults to
        {'impervious': 2016, 'cover': 2016, 'canopy': 2016}. Set the value of a layer
        to None to ignore it.
    geo_crs : str, optional
        The CRS of the input geometry, defaults to epsg:4326.
    crs : str, optional
        The spatial reference system to be used for requesting the data, defaults to
        epsg:4326.

    Returns
    -------
    xarray.Dataset
        NLCD within a geometry
    """
    years = {"impervious": 2016, "cover": 2016, "canopy": 2016} if years is None else years
    layers = _nlcd_layers(years)

    _geometry = geoutils.geo2polygon(geometry, geo_crs, crs)

    wms = WMS(ServiceURL().wms.mrlc, layers=layers, outformat="image/geotiff", crs=crs)
    r_dict = wms.getmap_bybox(_geometry.bounds, resolution, box_crs=crs)

    ds = geoutils.gtiff2xarray(r_dict, _geometry, crs)

    if isinstance(ds, xr.DataArray):
        ds = ds.to_dataset()

    for n in ds.keys():
        if "cover" in n.lower():
            ds = ds.rename({n: "cover"})
            ds.cover.attrs["units"] = "classes"
        elif "canopy" in n.lower():
            ds = ds.rename({n: "canopy"})
            ds.canopy.attrs["units"] = "%"
        elif "impervious" in n.lower():
            ds = ds.rename({n: "impervious"})
            ds.impervious.attrs["units"] = "%"

    return ds
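A minimal usage sketch for the function above, assuming ``nlcd`` is importable (e.g., from pygeohydro) and the MRLC WMS service is reachable; the bounding box, resolution, and years are illustrative only.

# Hypothetical usage; the bbox is (west, south, east, north) in epsg:4326.
bbox = (-69.77, 45.07, -69.31, 45.45)

# Request only the 2016 land cover layer at 30 m; impervious and canopy are skipped
# by setting their years to None, as described in the docstring.
lulc = nlcd(bbox, resolution=30, years={"impervious": None, "cover": 2016, "canopy": None})
print(lulc.cover.attrs["units"])  # "classes"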
Example #2
def ssebopeta_bygeom(
    geometry: Union[Polygon, Tuple[float, float, float, float]],
    dates: Union[Tuple[str, str], Union[int, List[int]]],
    geo_crs: str = DEF_CRS,
    fill_holes: bool = False,
) -> xr.DataArray:
    """Get daily actual ET for a region from SSEBop database.

    Notes
    -----
    Since there is no web service for subsetting SSEBop, the data first needs to be
    downloaded for the requested period and is then masked by the region of interest
    locally. Therefore, it is not as fast as other functions, and the bottleneck is
    likely the download speed.

    Parameters
    ----------
    geometry : shapely.geometry.Polygon or tuple
        The geometry for downloading and clipping the data. For a tuple bbox,
        the order should be (west, south, east, north).
    dates : tuple or list
        Start and end dates as a tuple (start, end) or a list of years [2001, 2010, ...].
    geo_crs : str, optional
        The CRS of the input geometry, defaults to epsg:4326.
    fill_holes : bool, optional
        Whether to fill the holes in the geometry's interior (Polygon type), defaults to False.

    Returns
    -------
    xarray.DataArray
        Daily actual ET within a geometry in mm/day at 1 km resolution
    """
    _geometry = geoutils.geo2polygon(geometry, geo_crs, DEF_CRS)
    _geometry = Polygon(_geometry.exterior) if fill_holes else _geometry

    f_list = _get_ssebopeta_urls(dates)

    session = RetrySession()

    with session.onlyipv4():

        def _ssebop(url_stamped):
            dt, url = url_stamped
            resp = session.get(url)
            zfile = zipfile.ZipFile(io.BytesIO(resp.content))
            content = zfile.read(zfile.filelist[0].filename)
            ds = geoutils.gtiff2xarray({"eta": content}, _geometry, DEF_CRS)
            return dt, ds.expand_dims({"time": [dt]})

        resp_list = ogc.utils.threading(_ssebop, f_list, max_workers=4)
        data = xr.merge(
            OrderedDict(sorted(resp_list, key=lambda x: x[0])).values())

    eta = data.eta.copy()
    eta *= 1e-3
    eta.attrs.update({"units": "mm/day", "nodatavals": (np.nan, )})
    return eta
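A minimal usage sketch for the function above, assuming ``ssebopeta_bygeom`` is importable (e.g., from pygeohydro); the bounding box and dates are illustrative, and the call downloads one zipped GeoTIFF per day before masking.

# Hypothetical usage; dates may also be a list of years, e.g. [2010, 2014].
bbox = (-69.77, 45.07, -69.31, 45.45)
eta = ssebopeta_bygeom(bbox, dates=("2005-10-01", "2005-10-07"))
print(eta.attrs["units"])  # "mm/day"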
Example #3
def nlcd(
    geometry: GTYPE,
    resolution: float,
    years: Optional[Mapping[str, Union[int, List[int]]]] = None,
    region: str = "L48",
    geo_crs: str = DEF_CRS,
    crs: str = DEF_CRS,
) -> xr.Dataset:
    """Get data from NLCD database (2019).

    .. deprecated:: 0.11.5
        Use :func:`nlcd_bygeom` or :func:`nlcd_bycoords` instead.

    Parameters
    ----------
    geometry : Polygon, MultiPolygon, or tuple of length 4
        The geometry or bounding box (west, south, east, north) for extracting the data.
    resolution : float
        The data resolution in meters. The width and height of the output are computed in pixel
        based on the geometry bounds and the given resolution.
    years : dict, optional
        The years for NLCD layers as a dictionary, defaults to
        ``{'impervious': [2019], 'cover': [2019], 'canopy': [2019], "descriptor": [2019]}``.
        Layers that are not in years are ignored, e.g., ``{'cover': [2016, 2019]}`` returns
        land cover data for 2016 and 2019.
    region : str, optional
        Region in the US, defaults to ``L48``. Valid values are ``L48`` (for CONUS),
        ``HI`` (for Hawaii), ``AK`` (for Alaska), and ``PR`` (for Puerto Rico).
        Both lower and upper cases are acceptable.
    geo_crs : str, optional
        The CRS of the input geometry, defaults to epsg:4326.
    crs : str, optional
        The spatial reference system to be used for requesting the data, defaults to
        epsg:4326.

    Returns
    -------
    xarray.Dataset
        NLCD within a geometry
    """
    msg = " ".join([
        "This function is deprecated and will be remove in future versions.",
        "Please use ``nlcd_bygeom`` or ``nlcd_bycoords`` instead.",
        "For now, this function calls ``nlcd_bygeom`` to retain the original functionality.",
    ])
    warnings.warn(msg, DeprecationWarning)
    geom = gpd.GeoSeries([geoutils.geo2polygon(geometry, geo_crs, crs)],
                         crs=crs)
    return nlcd_bygeom(geom, resolution, years, region, crs)[0]
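Since the wrapper above is deprecated, a minimal sketch of the recommended replacement is shown below; it assumes ``nlcd_bygeom`` takes the same positional arguments the wrapper passes (a GeoSeries, resolution, years, region, CRS) and returns an indexable collection of datasets.

import geopandas as gpd
from shapely.geometry import box

# Hypothetical usage mirroring the wrapper call above; arguments are illustrative.
geom = gpd.GeoSeries([box(-69.77, 45.07, -69.31, 45.45)], crs="epsg:4326")
cover = nlcd_bygeom(geom, 30, {"cover": [2019]}, "L48", "epsg:4326")[0]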
Example #4
    def get_bygeom(self, geometry: GTYPE, geo_crs: str) -> gpd.GeoDataFrame:
        """Retrieve NID data within a geometry.

        Parameters
        ----------
        geometry : Polygon, MultiPolygon, or tuple of length 4
            Geometry or bounding box (west, south, east, north) for extracting the data.
        geo_crs : str
            The CRS of the input geometry, e.g., epsg:4326.

        Returns
        -------
        geopandas.GeoDataFrame
            GeoDataFrame of NID data

        Examples
        --------
        >>> from pygeohydro import NID
        >>> nid = NID()
        >>> dams = nid.get_bygeom((-69.77, 45.07, -69.31, 45.45), "epsg:4326")
        >>> print(dams.name.iloc[0])
        Little Moose
        """
        _geometry = geoutils.geo2polygon(geometry, geo_crs, DEF_CRS)
        wbd = ArcGISRESTful(
            ServiceURL().restful.wbd,
            4,
            outformat="json",
            outfields="huc8",
            expire_after=self.expire_after,
            disable_caching=self.disable_caching,
        )
        resp = wbd.get_features(wbd.oids_bygeom(_geometry), return_geom=False)
        huc_ids = [
            tlz.get_in(["attributes", "huc8"], i) for r in resp
            for i in tlz.get_in(["features"], r)
        ]

        dams = self.get_byfilter([{"huc8": huc_ids}])[0]
        return dams[dams.within(_geometry)].copy()
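A Polygon (or MultiPolygon) can be passed instead of the bounding box shown in the doctest above; a minimal sketch, assuming the same import as in the Examples section:

from shapely.geometry import box

# Hypothetical usage; the box is built here only to demonstrate a Polygon input.
nid = NID()
dams = nid.get_bygeom(box(-69.77, 45.07, -69.31, 45.45), "epsg:4326")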
Example #5
def get_map(
    layers: Union[str, List[str]],
    geometry: Union[Polygon, MultiPolygon, Tuple[float, float, float, float]],
    resolution: float,
    geo_crs: str = DEF_CRS,
    crs: str = DEF_CRS,
) -> xr.DataArray:
    """Access to `3DEP <https://www.usgs.gov/core-science-systems/ngp/3dep>`__ service.

    The 3DEP service has multi-resolution sources, so depending on the user-provided
    resolution, the data is resampled on the server side based on all the available
    data sources. The following layers are available:

    - "DEM"
    - "Hillshade Gray"
    - "Aspect Degrees"
    - "Aspect Map"
    - "GreyHillshade_elevationFill"
    - "Hillshade Multidirectional"
    - "Slope Map"
    - "Slope Degrees"
    - "Hillshade Elevation Tinted"
    - "Height Ellipsoidal"
    - "Contour 25"
    - "Contour Smoothed 25"

    Parameters
    ----------
    layers : str or list
        A valid 3DEP layer or a list of them
    geometry : Polygon, MultiPolygon, or tuple
        A shapely Polygon or a bounding box (west, south, east, north)
    resolution : float
        The data resolution in meters. The width and height of the output are computed in pixel
        based on the geometry bounds and the given resolution.
    geo_crs : str, optional
        The spatial reference system of the input geometry, defaults to
        epsg:4326.
    crs : str, optional
        The spatial reference system to be used for requesting the data, defaults to
        epsg:4326.

    Returns
    -------
    xarray.DataArray
        The requested data within the geometry
    """
    if not isinstance(geometry, (Polygon, MultiPolygon, tuple)):
        raise InvalidInputType("geometry", "Polygon or tuple of length 4")

    _geometry = geoutils.geo2polygon(geometry, geo_crs, crs)

    _layers = layers if isinstance(layers, list) else [layers]
    if "DEM" in _layers:
        _layers[_layers.index("DEM")] = "None"

    _layers = [f"3DEPElevation:{lyr}" for lyr in _layers]

    wms = WMS(ServiceURL().wms.nm_3dep, layers=_layers, outformat="image/tiff", crs=crs)
    r_dict = wms.getmap_bybox(_geometry.bounds, resolution, box_crs=crs)

    ds = geoutils.gtiff2xarray(r_dict, _geometry, crs)

    valid_layers = wms.get_validlayers()
    rename = {lyr: lyr.split(":")[-1].replace(" ", "_").lower() for lyr in valid_layers}
    rename.update({"3DEPElevation:None": "elevation"})

    if isinstance(ds, xr.DataArray):
        ds.name = rename[ds.name]
    else:
        ds = ds.rename({n: rename[n] for n in ds.keys()})

    return ds
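A minimal usage sketch for the function above, assuming ``get_map`` is importable (e.g., from py3dep); the layers, bounding box, and resolution are illustrative.

# Hypothetical usage; "DEM" is mapped internally to the "3DEPElevation:None" sub-layer
# and the returned variables are renamed to "elevation" and "slope_degrees".
bbox = (-69.77, 45.07, -69.31, 45.45)
topo = get_map(["DEM", "Slope Degrees"], bbox, resolution=90)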
Example #6
def get_bygeom(
    geometry: Union[Polygon, MultiPolygon, Tuple[float, float, float, float]],
    dates: Union[Tuple[str, str], Union[int, List[int]]],
    geo_crs: str = DEF_CRS,
    variables: Optional[List[str]] = None,
    pet: bool = False,
) -> xr.Dataset:
    """Gridded data from the Daymet database at 1-km resolution.

    The data is clipped using NetCDF Subset Service.

    Parameters
    ----------
    geometry : Polygon, MultiPolygon, or bbox
        The geometry of the region of interest.
    dates : tuple or list
        Start and end dates as a tuple (start, end) or a list of years [2001, 2010, ...].
    geo_crs : str, optional
        The CRS of the input geometry, defaults to epsg:4326.
    variables : str or list, optional
        List of variables to be downloaded. The acceptable variables are
        ``tmin``, ``tmax``, ``prcp``, ``srad``, ``vp``, ``swe``, and ``dayl``.
        Descriptions can be found `here <https://daymet.ornl.gov/overview>`__.
    pet : bool, optional
        Whether to compute evapotranspiration based on the
        `UN-FAO 56 paper <http://www.fao.org/docrep/X0490E/X0490E00.htm>`__,
        defaults to False.

    Returns
    -------
    xarray.Dataset
        Daily climate data within a geometry
    """
    daymet = Daymet(variables, pet)

    if isinstance(dates, tuple) and len(dates) == 2:
        dates_itr = daymet.dates_tolist(dates)
    elif isinstance(dates, (list, int)):
        dates_itr = daymet.years_tolist(dates)
    else:
        raise InvalidInputType("dates", "tuple or list",
                               "(start, end) or [2001, 2010, ...]")

    _geometry = geoutils.geo2polygon(geometry, geo_crs, DEF_CRS)

    west, south, east, north = _geometry.bounds
    base_url = ServiceURL().restful.daymet_grid
    urls = []

    for s, e in dates_itr:
        for v in daymet.variables:
            urls.append(base_url + "&".join([
                f"{s.year}/daymet_v3_{v}_{s.year}_na.nc4?var=lat",
                "var=lon",
                f"var={v}",
                f"north={north}",
                f"west={west}",
                f"east={east}",
                f"south={south}",
                "disableProjSubset=on",
                "horizStride=1",
                f'time_start={s.strftime("%Y-%m-%dT%H:%M:%SZ")}',
                f'time_end={e.strftime("%Y-%m-%dT%H:%M:%SZ")}',
                "timeStride=1",
                "accept=netcdf",
            ]))

    def getter(url):
        return xr.load_dataset(daymet.session.get(url).content)

    data = xr.merge(ogc.utils.threading(getter, urls, max_workers=8))

    for k, v in daymet.units.items():
        if k in data.variables:
            data[k].attrs["units"] = v

    data = data.drop_vars(["lambert_conformal_conic"])

    crs = " ".join([
        "+proj=lcc",
        "+lat_1=25",
        "+lat_2=60",
        "+lat_0=42.5",
        "+lon_0=-100",
        "+x_0=0",
        "+y_0=0",
        "+ellps=WGS84",
        "+units=km",
        "+no_defs",
    ])
    data.attrs["crs"] = crs
    data.attrs["nodatavals"] = (0.0, )

    x_res, y_res = data.x.diff("x").min().item(), data.y.diff("y").min().item()
    # Shift the cell centers by half a cell to the grid origin (PixelAsArea convention).
    x_origin = data.x.values[0] - x_res / 2.0
    y_origin = data.y.values[0] - y_res / 2.0

    # Affine transform coefficients in (a, b, c, d, e, f) order.
    transform = (x_res, 0, x_origin, 0, y_res, y_origin)

    x_end = x_origin + data.dims["x"] * x_res
    y_end = y_origin + data.dims["y"] * y_res
    x_options = np.array([x_origin, x_end])
    y_options = np.array([y_origin, y_end])

    data.attrs["transform"] = transform
    data.attrs["res"] = (x_res, y_res)
    data.attrs["bounds"] = (
        x_options.min(),
        y_options.min(),
        x_options.max(),
        y_options.max(),
    )

    if pet:
        data = daymet.pet_bygrid(data)

    if isinstance(data, xr.Dataset):
        for v in data:
            data[v].attrs["crs"] = crs
            data[v].attrs["nodatavals"] = (0.0, )

    return geoutils.xarray_geomask(data, _geometry, DEF_CRS)
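A minimal usage sketch for the function above, assuming ``get_bygeom`` is importable (e.g., from pydaymet); the bounding box, dates, and variables are illustrative.

# Hypothetical usage; requests one week of precipitation and minimum temperature
# clipped to the bbox, without the optional FAO-56 PET computation.
bbox = (-69.77, 45.07, -69.31, 45.45)
clm = get_bygeom(bbox, ("2000-01-01", "2000-01-07"), variables=["prcp", "tmin"], pet=False)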