Example #1
def _normalize_data(agg, pixel_max, c, th):
    mapper = ArrayTypeFunctionMapping(numpy_func=_normalize_data_numpy,
                                      dask_func=_normalize_data_dask,
                                      cupy_func=_normalize_data_cupy,
                                      dask_cupy_func=_normalize_data_dask_cupy)
    out = mapper(agg)(agg.data, pixel_max, c, th)
    return out
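Every example on this page uses the same dispatch pattern: `ArrayTypeFunctionMapping` picks a backend implementation based on the array type backing the `xarray.DataArray`. The sketch below is a hypothetical, simplified illustration of that idea (the `pick_backend`, `double_numpy` and `double_dask` names are made up for this sketch; the real mapper also covers cupy and dask-with-cupy arrays).

import numpy as np
import xarray as xr

try:
    import dask.array as da
except ImportError:  # dask is optional for this sketch
    da = None


def pick_backend(agg, numpy_func, dask_func):
    # Return the backend function matching the array type backing `agg`.
    if da is not None and isinstance(agg.data, da.Array):
        return dask_func
    return numpy_func


def double_numpy(data):
    return data * 2  # eager NumPy computation


def double_dask(data):
    return data * 2  # stays lazy for Dask arrays


agg = xr.DataArray(np.arange(6).reshape(2, 3))
result = pick_backend(agg, double_numpy, double_dask)(agg.data)
print(result)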
Example #2
def binary(agg, values, name='binary'):
    """
    Binarize a data array based on a set of values. Data elements that equal
    a value in the set are set to 1; all other elements are set to 0.

    Parameters
    ----------
    agg : xarray.DataArray
        2D NumPy, CuPy, NumPy-backed Dask, or Cupy-backed Dask array
        of values to be reclassified.
    values : array-like object
        Values to keep in the binarized array.
    name : str, default='binary'
        Name of output aggregate array.

    Returns
    -------
    binarized_agg : xarray.DataArray, of the same type as `agg`
        2D aggregate array of binarized data array.
        All other input attributes are preserved.

    Examples
    --------
    Binary works with NumPy backed xarray DataArray

    .. sourcecode:: python

        >>> import numpy as np
        >>> import xarray as xr
        >>> from xrspatial.classify import binary

        >>> data = np.array([
            [np.nan,  1.,  2.,  3.,  4.],
            [5.,  6.,  7.,  8.,  9.],
            [10., 11., 12., 13., 14.],
            [15., 16., 17., 18., np.inf],
        ], dtype=np.float32)
        >>> agg = xr.DataArray(data)
        >>> values = [1, 2, 3]
        >>> agg_binary = binary(agg, values)
        >>> print(agg_binary)
        <xarray.DataArray 'binary' (dim_0: 4, dim_1: 5)>
        array([[0.,  1.,  1.,  1.,  0.],
               [0.,  0.,  0.,  0.,  0.],
               [0.,  0.,  0.,  0.,  0.],
               [0.,  0.,  0.,  0.,  0.]], dtype=float32)
        Dimensions without coordinates: dim_0, dim_1
    """

    mapper = ArrayTypeFunctionMapping(numpy_func=_run_numpy_binary,
                                      dask_func=_run_dask_numpy_binary,
                                      cupy_func=_run_cupy_binary,
                                      dask_cupy_func=_run_dask_cupy_binary)
    out = mapper(agg)(agg.data, values)
    return xr.DataArray(out,
                        name=name,
                        dims=agg.dims,
                        coords=agg.coords,
                        attrs=agg.attrs)
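The `_run_numpy_binary` backend is not shown in this example. A minimal NumPy-only sketch of the binarization step could look like the following (the `binary_numpy_sketch` helper is hypothetical and may differ from the real backend):

import numpy as np


def binary_numpy_sketch(data, values):
    # 1 where the element is in `values`, 0 elsewhere (NaN never matches).
    return np.isin(data, values).astype(np.float32)


data = np.array([[np.nan, 1., 2.], [3., 4., 5.]], dtype=np.float32)
print(binary_numpy_sketch(data, [1, 2, 3]))
# [[0. 1. 1.]
#  [1. 0. 0.]]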
Example #3
def _quantile(agg, k):
    mapper = ArrayTypeFunctionMapping(
        numpy_func=lambda *args: _run_quantile(*args, module=np),
        dask_func=lambda *args: _run_quantile(*args, module=da),
        cupy_func=lambda *args: _run_quantile(*args, module=cupy),
        dask_cupy_func=_run_dask_cupy_quantile)
    out = mapper(agg)(agg.data, k)
    return out
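`_run_quantile` itself is not shown here. As a rough illustration of quantile classification, here is a NumPy-only sketch (the `quantile_sketch` helper is hypothetical; boundary handling may differ from the real backend):

import numpy as np


def quantile_sketch(data, k):
    # k - 1 interior quantile edges computed on the finite values only.
    finite = data[np.isfinite(data)]
    edges = np.percentile(finite, np.linspace(0, 100, k + 1)[1:-1])
    out = np.searchsorted(edges, data).astype(np.float64)
    out[~np.isfinite(data)] = np.nan  # keep NaN / inf out of the classes
    return out


data = np.arange(20, dtype=np.float64).reshape(4, 5)
print(quantile_sketch(data, k=4))  # roughly equal-sized classes 0..3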
Example #4
def _bin(agg, bins, new_values):
    mapper = ArrayTypeFunctionMapping(numpy_func=_run_numpy_bin,
                                      dask_func=_run_dask_numpy_bin,
                                      cupy_func=_run_cupy_bin,
                                      dask_cupy_func=_run_dask_cupy_bin)

    out = mapper(agg)(agg.data, bins, new_values)
    return out
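The `_run_numpy_bin` backend is likewise not shown. An illustrative NumPy-only sketch of reclassifying values into `new_values` by `bins` (hypothetical helper; edge and NaN handling may differ from the real backend):

import numpy as np


def bin_sketch(data, bins, new_values):
    # np.digitize(..., right=True) maps data <= bins[j] to bin index j.
    idx = np.digitize(data, bins, right=True)
    out = np.full(data.shape, np.nan, dtype=np.float64)
    inside = idx < len(bins)  # values beyond the last bin stay NaN
    out[inside] = np.asarray(new_values, dtype=np.float64)[idx[inside]]
    return out


data = np.array([[5., 15., 25.], [35., 45., np.nan]])
print(bin_sketch(data, bins=[10, 20, 30, 50], new_values=[0, 1, 2, 3]))
# [[ 0.  1.  2.]
#  [ 3.  3. nan]]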
Example #5
def convolve_2d(data, kernel):
    mapper = ArrayTypeFunctionMapping(
        numpy_func=_convolve_2d_numpy,
        cupy_func=_convolve_2d_cupy,
        dask_func=_convolve_2d_dask_numpy,
        dask_cupy_func=lambda *args: not_implemented_func(
            *args, messages='convolve_2d() does not support dask with cupy backed xr.DataArray'  # noqa
        )
    )
    out = mapper(xr.DataArray(data))(data, kernel)
    return out
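The `_convolve_2d_*` backends are numba/cupy implementations and are not shown in this example. A tiny pure-NumPy sketch of a same-size 2D convolution conveys what they compute (the `convolve_2d_sketch` helper is hypothetical; the real backends are much faster and handle NaN propagation explicitly):

import numpy as np


def convolve_2d_sketch(data, kernel):
    ky, kx = kernel.shape
    py, px = ky // 2, kx // 2
    padded = np.pad(data, ((py, py), (px, px)), constant_values=np.nan)
    out = np.empty_like(data, dtype=np.float64)
    for i in range(data.shape[0]):
        for j in range(data.shape[1]):
            window = padded[i:i + ky, j:j + kx]
            out[i, j] = np.sum(window * kernel)  # NaN padding marks the edges
    return out


data = np.arange(16, dtype=np.float64).reshape(4, 4)
kernel = np.array([[0., 1., 0.], [1., 1., 1.], [0., 1., 0.]])
print(convolve_2d_sketch(data, kernel))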
Example #6
def _mean(data, excludes):
    agg = xr.DataArray(data)
    mapper = ArrayTypeFunctionMapping(
        numpy_func=_mean_numpy,
        cupy_func=_mean_cupy,
        dask_func=_mean_dask_numpy,
        dask_cupy_func=lambda *args: not_implemented_func(
            *args, messages='mean() does not support dask with cupy backed DataArray.'),  # noqa
    )
    out = mapper(agg)(agg.data, excludes)
    return out
Example #7
def general_output_checks(input_agg: xr.DataArray,
                          output_agg: xr.DataArray,
                          expected_results: np.ndarray = None,
                          verify_attrs: bool = True):

    # type of output is the same as of input
    assert isinstance(output_agg.data, type(input_agg.data))

    if isinstance(input_agg.data, da.Array):
        # dask case
        assert isinstance(output_agg.data.compute(),
                          type(input_agg.data.compute()))

    if verify_attrs:
        # shape and other attributes remain the same
        assert output_agg.shape == input_agg.shape
        assert output_agg.dims == input_agg.dims
        assert output_agg.attrs == input_agg.attrs
        for coord in input_agg.coords:
            np.testing.assert_allclose(output_agg[coord].data,
                                       input_agg[coord].data,
                                       equal_nan=True)

    if expected_results is not None:
        numpy_func = lambda output, expected: np.testing.assert_allclose(  # noqa: E731, E501
            output,
            expected_results,
            equal_nan=True,
            rtol=1e-06)
        dask_func = lambda output, expected: np.testing.assert_allclose(  # noqa: E731, E501
            output.compute(),
            expected_results,
            equal_nan=True,
            rtol=1e-06)
        cupy_func = lambda output, expected: np.testing.assert_allclose(  # noqa: E731, E501
            output.get(),
            expected_results,
            equal_nan=True,
            rtol=1e-06)
        dask_cupy_func = lambda output, expected: np.testing.assert_allclose(  # noqa: E731, E501
            output.compute().get(),
            expected_results,
            equal_nan=True,
            rtol=1e-06)
        mapper = ArrayTypeFunctionMapping(
            numpy_func=numpy_func,
            dask_func=dask_func,
            cupy_func=cupy_func,
            dask_cupy_func=dask_cupy_func,
        )
        mapper(output_agg)(output_agg.data, expected_results)
Example #8
def ndvi(nir_agg: DataArray, red_agg: DataArray, name='ndvi'):
    """
    Computes Normalized Difference Vegetation Index (NDVI).
    Used to determine if a cell contains live green vegetation.

    Parameters:
    ----------
    nir_agg: xarray.DataArray
        2D array of near-infrared band data.
    red_agg: xarray.DataArray
        2D array of red band data.
    name: str, optional (default ="ndvi")
        Name of output DataArray.

    Returns
    ----------
    xarray.DataArray
        2D array, of the same type as the input, of calculated ndvi values.
        All other input attributes are preserved.

    Notes:
    ----------
    Algorithm References:
    http://ceholden.github.io/open-geo-tutorial/python/chapter_2_indices.html

    Examples:
    ----------
    Imports
    >>> import numpy as np
    >>> import xarray as xr
    >>> import xrspatial

    Create Sample Band Data
    >>> np.random.seed(0)
    >>> nir_agg = xr.DataArray(np.random.rand(4,4), dims = ["lat", "lon"])
    >>> height, width = nir_agg.shape
    >>> _lat = np.linspace(0, height - 1, height)
    >>> _lon = np.linspace(0, width - 1, width)
    >>> nir_agg["lat"] = _lat
    >>> nir_agg["lon"] = _lon

    >>> np.random.seed(1)
    >>> red_agg = xr.DataArray(np.random.rand(4,4), dims = ["lat", "lon"])
    >>> height, width = red_agg.shape
    >>> _lat = np.linspace(0, height - 1, height)
    >>> _lon = np.linspace(0, width - 1, width)
    >>> red_agg["lat"] = _lat
    >>> red_agg["lon"] = _lon

    >>> print(nir_agg, red_agg)
    <xarray.DataArray (lat: 4, lon: 4)>
    array([[0.5488135 , 0.71518937, 0.60276338, 0.54488318],
           [0.4236548 , 0.64589411, 0.43758721, 0.891773  ],
           [0.96366276, 0.38344152, 0.79172504, 0.52889492],
           [0.56804456, 0.92559664, 0.07103606, 0.0871293 ]])
    Coordinates:
      * lat      (lat) float64 0.0 1.0 2.0 3.0
      * lon      (lon) float64 0.0 1.0 2.0 3.0
    <xarray.DataArray (lat: 4, lon: 4)>
    array([[4.17022005e-01, 7.20324493e-01, 1.14374817e-04, 3.02332573e-01],
           [1.46755891e-01, 9.23385948e-02, 1.86260211e-01, 3.45560727e-01],
           [3.96767474e-01, 5.38816734e-01, 4.19194514e-01, 6.85219500e-01],
           [2.04452250e-01, 8.78117436e-01, 2.73875932e-02, 6.70467510e-01]])
    Coordinates:
      * lat      (lat) float64 0.0 1.0 2.0 3.0
      * lon      (lon) float64 0.0 1.0 2.0 3.0

    Create NDVI DataArray
    >>> data = xrspatial.multispectral.ndvi(nir_agg, red_agg)
    >>> print(data)
    <xarray.DataArray 'ndvi' (lat: 4, lon: 4)>
    array([[ 0.13645336, -0.0035772 ,  0.99962057,  0.28629143],
           [ 0.4854378 ,  0.74983879,  0.40286613,  0.44144297],
           [ 0.41670295, -0.16847257,  0.30764267, -0.12875605],
           [ 0.4706716 ,  0.02632302,  0.44347537, -0.76998504]])
    Coordinates:
      * lat      (lat) float64 0.0 1.0 2.0 3.0
      * lon      (lon) float64 0.0 1.0 2.0 3.0
    """

    validate_arrays(nir_agg, red_agg)

    mapper = ArrayTypeFunctionMapping(numpy_func=_normalized_ratio_cpu,
                                      dask_func=_run_normalized_ratio_dask,
                                      cupy_func=_run_normalized_ratio_cupy,
                                      dask_cupy_func=_run_normalized_ratio_dask_cupy)

    out = mapper(nir_agg)(nir_agg.data, red_agg.data)

    return DataArray(out,
                     name=name,
                     coords=nir_agg.coords,
                     dims=nir_agg.dims,
                     attrs=nir_agg.attrs)
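For reference, the normalized ratio computed here is the standard NDVI formula, NDVI = (NIR - Red) / (NIR + Red). The sketch below (hypothetical `ndvi_sketch` helper) reproduces the docstring example values with plain NumPy:

import numpy as np


def ndvi_sketch(nir, red):
    nir = np.asarray(nir, dtype=np.float64)
    red = np.asarray(red, dtype=np.float64)
    return (nir - red) / (nir + red)


np.random.seed(0)
nir = np.random.rand(4, 4)
np.random.seed(1)
red = np.random.rand(4, 4)
print(ndvi_sketch(nir, red))  # matches the docstring example values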
Example #9
def savi(nir_agg: xr.DataArray,
         red_agg: xr.DataArray,
         soil_factor: float = 1.0,
         name: str = 'savi'):
    """
    Computes Soil Adjusted Vegetation Index (SAVI). Used to determine
    if a cell contains living vegetation while minimizing soil
    brightness.

    Parameters
    ----------
    nir_agg : xr.DataArray
        2D array of near-infrared band data.
    red_agg : xr.DataArray
        2D array of red band data.
    soil_factor : float, default=1.0
        soil adjustment factor between -1.0 and 1.0.
        When set to zero, savi will return the same as ndvi.
    name : str, default='savi'
        Name of output DataArray.

    Returns
    -------
    savi_agg : xr.DataArray of same type as inputs
        2D array of savi values.
        All other input attributes are preserved.

    References
    ----------
        - ScienceDirect: https://www.sciencedirect.com/science/article/abs/pii/003442578890106X # noqa

    Examples
    --------
    .. plot::
       :include-source:

        >>> from xrspatial.datasets import get_data
        >>> data = get_data('sentinel-2')  # Open Example Data
        >>> nir = data['NIR']
        >>> red = data['Red']
        >>> from xrspatial.multispectral import savi
        >>> # Generate SAVI Aggregate Array
        >>> savi_agg = savi(nir_agg=nir, red_agg=red)
        >>> nir.plot(aspect=2, size=4)
        >>> red.plot(aspect=2, size=4)
        >>> savi_agg.plot(aspect=2, size=4)

    .. sourcecode:: python

        >>> y1, x1, y2, x2 = 100, 100, 103, 104
        >>> print(nir[y1:y2, x1:x2].data)
        [[1519. 1504. 1530. 1589.]
         [1491. 1473. 1542. 1609.]
         [1479. 1461. 1592. 1653.]]
        >>> print(red[y1:y2, x1:x2].data)
        [[1327. 1329. 1363. 1392.]
         [1309. 1331. 1423. 1424.]
         [1293. 1337. 1455. 1414.]]
        >>> print(savi_agg[y1:y2, x1:x2].data)
        [[0.0337197  0.03087509 0.0288528  0.03303152]
         [0.0324884  0.02531194 0.02006069 0.03048781]
         [0.03353769 0.02215077 0.02247375 0.03895046]]
    """

    validate_arrays(red_agg, nir_agg)

    if not -1.0 <= soil_factor <= 1.0:
        raise ValueError("soil factor must be between [-1.0, 1.0]")

    mapper = ArrayTypeFunctionMapping(numpy_func=_savi_cpu,
                                      dask_func=_savi_dask,
                                      cupy_func=_savi_cupy,
                                      dask_cupy_func=_savi_dask_cupy)

    out = mapper(red_agg)(nir_agg.data, red_agg.data, soil_factor)

    return DataArray(out,
                     name=name,
                     coords=nir_agg.coords,
                     dims=nir_agg.dims,
                     attrs=nir_agg.attrs)
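The textbook SAVI definition, for reference, is SAVI = (1 + L) * (NIR - Red) / (NIR + Red + L), where L is the soil adjustment factor. The `_savi_*` backends are not shown in this example and their exact normalization may differ, so treat the sketch below (hypothetical `savi_textbook` helper) only as the published formula:

import numpy as np


def savi_textbook(nir, red, soil_factor=1.0):
    nir = np.asarray(nir, dtype=np.float64)
    red = np.asarray(red, dtype=np.float64)
    # Published SAVI formula; not necessarily identical to _savi_cpu.
    return (1.0 + soil_factor) * (nir - red) / (nir + red + soil_factor)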
Example #10
def arvi(nir_agg: xr.DataArray,
         red_agg: xr.DataArray,
         blue_agg: xr.DataArray,
         name='arvi'):
    """
    Computes Atmospherically Resistant Vegetation Index. Allows for
    molecular and ozone correction with no further need for aerosol
    correction, except for dust conditions.

    Parameters
    ----------
    nir_agg : xarray.DataArray
        2D array of near-infrared band data.
    red_agg : xarray.DataArray
        2D array of red band data.
    blue_agg : xarray.DataArray
        2D array of blue band data.
    name : str, default='arvi'
        Name of output DataArray.

    Returns
    -------
    arvi_agg : xarray.DataArray of the same type as inputs.
        2D array of arvi values. All other input attributes are preserved.

    References
    ----------
        - MODIS: https://modis.gsfc.nasa.gov/sci_team/pubs/abstract_new.php?id=03667 # noqa

    Examples
    --------
    In this example, we'll use data available in xrspatial.datasets

    .. plot::
       :include-source:

        >>> from xrspatial.datasets import get_data
        >>> data = get_data('sentinel-2')  # Open Example Data
        >>> nir = data['NIR']
        >>> red = data['Red']
        >>> blue = data['Blue']
        >>> from xrspatial.multispectral import arvi
        >>> # Generate ARVI Aggregate Array
        >>> arvi_agg = arvi(nir_agg=nir, red_agg=red, blue_agg=blue)
        >>> nir.plot(cmap='Greys', aspect=2, size=4)
        >>> red.plot(aspect=2, size=4)
        >>> blue.plot(aspect=2, size=4)
        >>> arvi_agg.plot(aspect=2, size=4)

    .. sourcecode:: python

        >>> y1, x1, y2, x2 = 100, 100, 103, 104
        >>> print(nir[y1:y2, x1:x2].data)
        [[1519. 1504. 1530. 1589.]
         [1491. 1473. 1542. 1609.]
         [1479. 1461. 1592. 1653.]]
        >>> print(red[y1:y2, x1:x2].data)
        [[1327. 1329. 1363. 1392.]
         [1309. 1331. 1423. 1424.]
         [1293. 1337. 1455. 1414.]]
        >>> print(blue[y1:y2, x1:x2].data)
        [[1281. 1270. 1254. 1297.]
         [1241. 1249. 1280. 1309.]
         [1239. 1257. 1322. 1329.]]
        >>> print(arvi_agg[y1:y2, x1:x2].data)
        [[ 0.02676934  0.02135493  0.01052632  0.01798942]
         [ 0.02130841  0.01114413 -0.0042343   0.01214013]
         [ 0.02488688  0.00816024  0.00068681  0.02650602]]
    """

    validate_arrays(red_agg, nir_agg, blue_agg)

    mapper = ArrayTypeFunctionMapping(numpy_func=_arvi_cpu,
                                      dask_func=_arvi_dask,
                                      cupy_func=_arvi_cupy,
                                      dask_cupy_func=_arvi_dask_cupy)

    out = mapper(red_agg)(nir_agg.data, red_agg.data, blue_agg.data)

    return DataArray(out,
                     name=name,
                     coords=nir_agg.coords,
                     dims=nir_agg.dims,
                     attrs=nir_agg.attrs)
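The ratio computed here is consistent with ARVI = (NIR - 2*Red + Blue) / (NIR + 2*Red + Blue), which reproduces the printed example values. A plain-NumPy reference (hypothetical `arvi_sketch` helper):

import numpy as np


def arvi_sketch(nir, red, blue):
    nir, red, blue = (np.asarray(a, dtype=np.float64) for a in (nir, red, blue))
    return (nir - 2.0 * red + blue) / (nir + 2.0 * red + blue)


print(arvi_sketch(1519., 1327., 1281.))  # ~0.0268, cf. arvi_agg[100, 100] above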
Example #11
def sipi(nir_agg: xr.DataArray,
         red_agg: xr.DataArray,
         blue_agg: xr.DataArray,
         name='sipi'):
    """
    Computes Structure Insensitive Pigment Index which is helpful in early
    disease detection in vegetation.

    Parameters
    ----------
    nir_agg : xr.DataArray
        2D array of near-infrared band data.
    red_agg : xr.DataArray
        2D array of red band data.
    blue_agg : xr.DataArray
        2D array of blue band data.
    name: str, default='sipi'
        Name of output DataArray.

    Returns
    -------
     sipi_agg : xr.DataArray of same type as inputs
        2D array of sipi values.
        All other input attributes are preserved.

    References
    ----------
        - Wikipedia: https://en.wikipedia.org/wiki/Enhanced_vegetation_index

    Examples
    --------
    .. plot::
       :include-source:

        >>> from xrspatial.datasets import get_data
        >>> data = get_data('sentinel-2')  # Open Example Data
        >>> nir = data['NIR']
        >>> red = data['Red']
        >>> blue = data['Blue']
        >>> from xrspatial.multispectral import sipi
        >>> # Generate SIPI Aggregate Array
        >>> sipi_agg = sipi(nir_agg=nir, red_agg=red, blue_agg=blue)
        >>> nir.plot(cmap='Greys', aspect=2, size=4)
        >>> red.plot(aspect=2, size=4)
        >>> blue.plot(aspect=2, size=4)
        >>> sipi_agg.plot(aspect=2, size=4)

    .. sourcecode:: python

        >>> y1, x1, y2, x2 = 100, 100, 103, 104
        >>> print(nir[y1:y2, x1:x2].data)
        [[1519. 1504. 1530. 1589.]
         [1491. 1473. 1542. 1609.]
         [1479. 1461. 1592. 1653.]]
        >>> print(red[y1:y2, x1:x2].data)
        [[1327. 1329. 1363. 1392.]
         [1309. 1331. 1423. 1424.]
         [1293. 1337. 1455. 1414.]]
        >>> print(blue[y1:y2, x1:x2].data)
        [[1281. 1270. 1254. 1297.]
         [1241. 1249. 1280. 1309.]
         [1239. 1257. 1322. 1329.]]
        >>> print(sipi_agg[y1:y2, x1:x2].data)
        [[1.2395834 1.3371428 1.6526946 1.4822335]
         [1.3736264 1.5774648 2.2016807 1.6216216]
         [1.2903225 1.6451613 1.9708029 1.3556485]]
    """

    validate_arrays(red_agg, nir_agg, blue_agg)

    mapper = ArrayTypeFunctionMapping(numpy_func=_sipi_cpu,
                                      dask_func=_sipi_dask,
                                      cupy_func=_sipi_cupy,
                                      dask_cupy_func=_sipi_dask_cupy)

    out = mapper(red_agg)(nir_agg.data, red_agg.data, blue_agg.data)

    return DataArray(out,
                     name=name,
                     coords=nir_agg.coords,
                     dims=nir_agg.dims,
                     attrs=nir_agg.attrs)
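The printed values match SIPI = (NIR - Blue) / (NIR - Red), e.g. (1519 - 1281) / (1519 - 1327) ~ 1.2396. A plain-NumPy reference (hypothetical `sipi_sketch` helper):

import numpy as np


def sipi_sketch(nir, red, blue):
    nir, red, blue = (np.asarray(a, dtype=np.float64) for a in (nir, red, blue))
    return (nir - blue) / (nir - red)


print(sipi_sketch(1519., 1327., 1281.))  # ~1.2396, cf. sipi_agg[100, 100] above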
Example #12
def natural_breaks(agg: xr.DataArray,
                   num_sample: Optional[int] = 20000,
                   name: Optional[str] = 'natural_breaks',
                   k: int = 5) -> xr.DataArray:
    """
    Reclassifies data for array `agg` into new values based on Natural
    Breaks or K-Means clustering method. Values are grouped so that
    similar values are placed in the same group and space between
    groups is maximized.

    Parameters
    ----------
    agg : xarray.DataArray
        2D NumPy DataArray of values to be reclassified.
    num_sample : int, default=20000
        Number of sample data points used to fit the model.
        Natural Breaks (Jenks) classification has O(n²) complexity,
        where n is the total number of data points, i.e. `agg.size`.
        When n is large, the model should be fitted on a small sub-sample
        of the data instead of the whole dataset.
    k : int, default=5
        Number of classes to be produced.
    name : str, default='natural_breaks'
        Name of output aggregate.

    Returns
    -------
    natural_breaks_agg : xarray.DataArray of the same type as `agg`
        2D aggregate array of natural break allocations.
        All other input attributes are preserved.

    References
    ----------
        - PySAL: https://pysal.org/mapclassify/_modules/mapclassify/classifiers.html#NaturalBreaks # noqa
        - jenks: https://github.com/perrygeo/jenks/blob/master/jenks.pyx

    Examples
    --------
    natural_breaks() works with numpy backed xarray DataArray.

    .. sourcecode:: python

        >>> import numpy as np
        >>> import xarray as xr
        >>> from xrspatial.classify import natural_breaks

        >>> elevation = np.array([
            [np.nan,  1.,  2.,  3.,  4.],
            [ 5.,  6.,  7.,  8.,  9.],
            [10., 11., 12., 13., 14.],
            [15., 16., 17., 18., 19.],
            [20., 21., 22., 23., np.inf]
        ])
        >>> agg_numpy = xr.DataArray(elevation, attrs={'res': (10.0, 10.0)})
        >>> numpy_natural_breaks = natural_breaks(agg_numpy, k=5)
        >>> print(numpy_natural_breaks)
        <xarray.DataArray 'natural_breaks' (dim_0: 5, dim_1: 5)>
        array([[nan,  0.,  0.,  0.,  0.],
               [ 1.,  1.,  1.,  1.,  2.],
               [ 2.,  2.,  2.,  2.,  3.],
               [ 3.,  3.,  3.,  3.,  4.],
               [ 4.,  4.,  4.,  4., nan]], dtype=float32)
        Dimensions without coordinates: dim_0, dim_1
        Attributes:
            res:      (10.0, 10.0)

    natural_breaks() works with cupy backed xarray DataArray.

    .. sourcecode:: python

        >>> import cupy
        >>> agg_cupy = xr.DataArray(cupy.asarray(elevation))
        >>> cupy_natural_breaks = natural_breaks(agg_cupy)
        >>> print(type(cupy_natural_breaks))
        <class 'xarray.core.dataarray.DataArray'>
        >>> print(cupy_natural_breaks)
        <xarray.DataArray 'natural_breaks' (dim_0: 5, dim_1: 5)>
        array([[nan,  0.,  0.,  0.,  0.],
               [ 1.,  1.,  1.,  1.,  2.],
               [ 2.,  2.,  2.,  2.,  3.],
               [ 3.,  3.,  3.,  3.,  4.],
               [ 4.,  4.,  4.,  4., nan]], dtype=float32)
        Dimensions without coordinates: dim_0, dim_1
    """

    mapper = ArrayTypeFunctionMapping(
        numpy_func=lambda *args: _run_natural_break(*args),
        dask_func=lambda *args: not_implemented_func(
            *args, messages='natural_breaks() does not support dask with numpy backed DataArray.'),  # noqa
        cupy_func=lambda *args: not_implemented_func(
            *args, messages='natural_breaks() does not support cupy backed DataArray.'),  # noqa
        dask_cupy_func=lambda *args: not_implemented_func(
            *args, messages='natural_breaks() does not support dask with cupy backed DataArray.'),  # noqa
    )
    out = mapper(agg)(agg, num_sample, k)
    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
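The `num_sample` parameter exists because Jenks fitting is O(n²); when the raster is large, the breaks are fitted on a random subset of the data. A sketch of that sub-sampling idea (the `subsample_for_fit` helper is hypothetical, not the library's internal code):

import numpy as np


def subsample_for_fit(data, num_sample):
    finite = data[np.isfinite(data)]
    if finite.size <= num_sample:
        return finite
    rng = np.random.default_rng(0)
    return rng.choice(finite, size=num_sample, replace=False)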
Example #13
def hotspots(raster, kernel):
    """
    Identify statistically significant hot spots and cold spots in an
    input raster. To be a statistically significant hot spot, a feature
    will have a high value and be surrounded by other features with
    high values as well.
    The neighborhood of a feature is defined by the input kernel, which
    currently supports circle, annulus, and custom kernel shapes.

    The result should be a raster with the following 7 values:
        - 90 for 90% confidence high value cluster
        - 95 for 95% confidence high value cluster
        - 99 for 99% confidence high value cluster
        - -90 for 90% confidence low value cluster
        - -95 for 95% confidence low value cluster
        - -99 for 99% confidence low value cluster
        - 0 for no significance

    Parameters
    ----------
    raster : xarray.DataArray
        2D Input raster image with `raster.shape` = (height, width).
        Can be a NumPy backed, CuPy backed, or Dask with NumPy backed DataArray
    kernel : Numpy Array
        2D array where values of 1 indicate the kernel.

    Returns
    -------
    hotspots_agg : xarray.DataArray of same type as `raster`
        2D array of hotspots with values indicating confidence level.

    Examples
    --------
    .. sourcecode:: python

        >>> import numpy as np
        >>> import xarray as xr
        >>> from xrspatial.convolution import custom_kernel
        >>> kernel = custom_kernel(np.array([[1, 1, 0]]))
        >>> data = np.array([
        ...    [0, 1000, 1000, 0, 0, 0],
        ...    [0, 0, 0, -1000, -1000, 0],
        ...    [0, -900, -900, 0, 0, 0],
        ...    [0, 100, 1000, 0, 0, 0]])
        >>> from xrspatial.focal import hotspots
        >>> hotspots(xr.DataArray(data), kernel)
        array([[  0,   0,  95,   0,   0,   0],
               [  0,   0,   0,   0, -90,   0],
               [  0,   0, -90,   0,   0,   0],
               [  0,   0,   0,   0,   0,   0]], dtype=int8)
        Dimensions without coordinates: dim_0, dim_1
    """

    # validate raster
    if not isinstance(raster, DataArray):
        raise TypeError("`raster` must be instance of DataArray")

    if raster.ndim != 2:
        raise ValueError("`raster` must be 2D")

    mapper = ArrayTypeFunctionMapping(
        numpy_func=_hotspots_numpy,
        cupy_func=_hotspots_cupy,
        dask_func=_hotspots_dask_numpy,
        dask_cupy_func=lambda *args: not_implemented_func(
            *args,
            messages=
            'hotspots() does not support dask with cupy backed DataArray.'
        ),  # noqa
    )
    out = mapper(raster)(raster, kernel)

    attrs = copy.deepcopy(raster.attrs)
    attrs['unit'] = '%'

    return DataArray(out, coords=raster.coords, dims=raster.dims, attrs=attrs)
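A rough sketch of the z-score thresholding idea behind hot/cold spot classification, assuming the usual 90/95/99% confidence cut-offs of about 1.65, 1.96 and 2.58 standard deviations; this is only an illustration, not the library's exact implementation (the `classify_zscores` helper is hypothetical):

import numpy as np


def classify_zscores(z):
    out = np.zeros(z.shape, dtype=np.int8)
    out[z > 1.65] = 90    # ~90% confidence hot spot
    out[z > 1.96] = 95
    out[z > 2.58] = 99
    out[z < -1.65] = -90  # ~90% confidence cold spot
    out[z < -1.96] = -95
    out[z < -2.58] = -99
    return out


z = np.array([[0.5, 2.0, 3.0], [-2.0, -3.0, 0.0]])
print(classify_zscores(z))
# [[  0  95  99]
#  [-95 -99   0]]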
Example #14
def nbr(nir_agg: xr.DataArray, swir2_agg: xr.DataArray, name='nbr'):
    """
    Computes Normalized Burn Ratio. Used to identify burned areas and
    provide a measure of burn severity.

    Parameters
    ----------
    nir_agg : xr.DataArray
        2D array of near-infrared band.
    swir_agg : xr.DataArray
        2D array of shortwave infrared band.
        (Landsat 4-7: Band 6)
        (Landsat 8: Band 7)
    name : str, default='nbr'
        Name of output DataArray.

    Returns
    -------
    nbr_agg : xr.DataArray of the same type as inputs
        2D array of nbr values.
        All other input attributes are preserved.

    References
    ----------
        - USGS: https://www.usgs.gov/land-resources/nli/landsat/landsat-normalized-burn-ratio # noqa
    Examples
    --------
    .. plot::
       :include-source:

        >>> from xrspatial.datasets import get_data
        >>> data = get_data('sentinel-2')  # Open Example Data
        >>> nir = data['NIR']
        >>> swir2 = data['SWIR2']
        >>> from xrspatial.multispectral import nbr
        >>> # Generate NBR Aggregate Array
        >>> nbr_agg = nbr(nir_agg=nir, swir2_agg=swir2)
        >>> nir.plot(aspect=2, size=4)
        >>> swir2.plot(aspect=2, size=4)
        >>> nbr_agg.plot(aspect=2, size=4)

    .. sourcecode:: python

        >>> y1, x1, y2, x2 = 100, 100, 103, 104
        >>> print(nir[y1:y2, x1:x2].data)
        [[1519. 1504. 1530. 1589.]
         [1491. 1473. 1542. 1609.]
         [1479. 1461. 1592. 1653.]]
        >>> print(swir2[y1:y2, x1:x2].data)
        [[1866. 1962. 2086. 2112.]
         [1811. 1900. 2012. 2041.]
         [1838. 1956. 2067. 2109.]]
        >>> print(nbr_agg[y1:y2, x1:x2].data)
        [[-0.10251108 -0.1321408  -0.15376106 -0.14131317]
         [-0.09691096 -0.12659353 -0.13224536 -0.11835616]
         [-0.10823033 -0.14486392 -0.12981689 -0.12121212]]
    """

    validate_arrays(nir_agg, swir2_agg)

    mapper = ArrayTypeFunctionMapping(
        numpy_func=_normalized_ratio_cpu,
        dask_func=_run_normalized_ratio_dask,
        cupy_func=_run_normalized_ratio_cupy,
        dask_cupy_func=_run_normalized_ratio_dask_cupy,
    )

    out = mapper(nir_agg)(nir_agg.data, swir2_agg.data)

    return DataArray(out,
                     name=name,
                     coords=nir_agg.coords,
                     dims=nir_agg.dims,
                     attrs=nir_agg.attrs)
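NBR uses the same normalized ratio backends as ndvi(), here NBR = (NIR - SWIR2) / (NIR + SWIR2), which reproduces the printed values, e.g. (1519 - 1866) / (1519 + 1866) ~ -0.1025. A plain-NumPy reference (hypothetical `nbr_sketch` helper):

import numpy as np


def nbr_sketch(nir, swir2):
    nir = np.asarray(nir, dtype=np.float64)
    swir2 = np.asarray(swir2, dtype=np.float64)
    return (nir - swir2) / (nir + swir2)


print(nbr_sketch(1519., 1866.))  # ~ -0.1025, cf. nbr_agg[100, 100] above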
Example #15
def gci(nir_agg: xr.DataArray, green_agg: xr.DataArray, name='gci'):
    """
    Computes Green Chlorophyll Index. Used to estimate the content of
    leaf chlorophyll and predict the physiological state of vegetation
    and plant health.

    Parameters
    ----------
    nir_agg : xr.DataArray
        2D array of near-infrared band data.
    green_agg : xr.DataArray
        2D array of green band data.
    name : str, default='gci'
        Name of output DataArray.

    Returns
    -------
    gci_agg : xarray.DataArray of the same type as inputs
        2D array of gci values.
        All other input attributes are preserved.

    References
    ----------
        - Wikipedia: https://en.wikipedia.org/wiki/Enhanced_vegetation_index

    Examples
    --------
    .. plot::
       :include-source:

        >>> from xrspatial.datasets import get_data
        >>> data = get_data('sentinel-2')  # Open Example Data
        >>> nir = data['NIR']
        >>> green = data['Green']
        >>> from xrspatial.multispectral import gci
        >>> # Generate GCI Aggregate Array
        >>> gci_agg = gci(nir_agg=nir, green_agg=green)
        >>> nir.plot(aspect=2, size=4)
        >>> green.plot(aspect=2, size=4)
        >>> gci_agg.plot(aspect=2, size=4)

    .. sourcecode:: python

        >>> y1, x1, y2, x2 = 100, 100, 103, 104
        >>> print(nir[y1:y2, x1:x2].data)
        [[1519. 1504. 1530. 1589.]
         [1491. 1473. 1542. 1609.]
         [1479. 1461. 1592. 1653.]]
        >>> print(green[y1:y2, x1:x2].data)
        [[1120. 1130. 1157. 1191.]
         [1111. 1137. 1190. 1221.]
         [1097. 1139. 1228. 1216.]]
        >>> print(gci_agg[y1:y2, x1:x2].data)
        [[0.35625    0.33097345 0.3223855  0.33417296]
         [0.3420342  0.29551452 0.29579833 0.31777233]
         [0.34822243 0.28270411 0.29641694 0.359375  ]]
    """

    validate_arrays(nir_agg, green_agg)

    mapper = ArrayTypeFunctionMapping(numpy_func=_gci_cpu,
                                      dask_func=_gci_dask,
                                      cupy_func=_gci_cupy,
                                      dask_cupy_func=_gci_dask_cupy)

    out = mapper(nir_agg)(nir_agg.data, green_agg.data)

    return DataArray(out,
                     name=name,
                     coords=nir_agg.coords,
                     dims=nir_agg.dims,
                     attrs=nir_agg.attrs)
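The printed values match GCI = NIR / Green - 1, e.g. 1519 / 1120 - 1 = 0.35625. A plain-NumPy reference (hypothetical `gci_sketch` helper):

import numpy as np


def gci_sketch(nir, green):
    nir = np.asarray(nir, dtype=np.float64)
    green = np.asarray(green, dtype=np.float64)
    return nir / green - 1.0


print(gci_sketch(1519., 1120.))  # 0.35625, cf. gci_agg[100, 100] above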
Example #16
def evi(nir_agg: xr.DataArray,
        red_agg: xr.DataArray,
        blue_agg: xr.DataArray,
        c1=6.0,
        c2=7.5,
        soil_factor=1.0,
        gain=2.5,
        name='evi'):
    """
    Computes Enhanced Vegetation Index. Allows for improved sensitivity
    in high biomass regions, de-coupling of the canopy background signal
    and reduction of atmospheric influences.

    Parameters
    ----------
    nir_agg : xr.DataArray
        2D array of near-infrared band data.
    red_agg : xr.DataArray
        2D array of red band data.
    blue_agg : xr.DataArray
        2D array of blue band data.
    c1 : float, default=6.0
        First coefficient of the aerosol resistance term.
    c2 : float, default=7.5
        Second coefficient of the aerosol resistance term.
    soil_factor : float, default=1.0
        Soil adjustment factor between -1.0 and 1.0.
    gain : float, default=2.5
        Amplitude adjustment factor.
    name : str, default='evi'
        Name of output DataArray.

    Returns
    -------
    evi_agg : xarray.DataArray of same type as inputs
        2D array of evi values.
        All other input attributes are preserved.

    References
    ----------
        - Wikipedia: https://en.wikipedia.org/wiki/Enhanced_vegetation_index

    Examples
    --------
    .. plot::
       :include-source:

        >>> from xrspatial.datasets import get_data
        >>> data = get_data('sentinel-2')  # Open Example Data
        >>> nir = data['NIR']
        >>> red = data['Red']
        >>> blue = data['Blue']
        >>> from xrspatial.multispectral import evi
        >>> # Generate EVI Aggregate Array
        >>> evi_agg = evi(nir_agg=nir, red_agg=red, blue_agg=blue)
        >>> nir.plot(aspect=2, size=4)
        >>> red.plot(aspect=2, size=4)
        >>> blue.plot(aspect=2, size=4)
        >>> evi_agg.plot(aspect=2, size=4)

    .. sourcecode:: python

        >>> y1, x1, y2, x2 = 100, 100, 103, 104
        >>> print(nir[y1:y2, x1:x2].data)
        [[1519. 1504. 1530. 1589.]
         [1491. 1473. 1542. 1609.]
         [1479. 1461. 1592. 1653.]]
        >>> print(red[y1:y2, x1:x2].data)
        [[1327. 1329. 1363. 1392.]
         [1309. 1331. 1423. 1424.]
         [1293. 1337. 1455. 1414.]]
        >>> print(blue[y1:y2, x1:x2].data)
        [[1281. 1270. 1254. 1297.]
         [1241. 1249. 1280. 1309.]
         [1239. 1257. 1322. 1329.]]
        >>> print(evi_agg[y1:y2, x1:x2].data)
        [[-3.8247013 -9.51087    1.3733553  2.2960372]
         [11.818182   3.837838   0.6185031  1.3744428]
         [-8.53211    5.486726   0.8394608  3.5043988]]
    """

    if not red_agg.shape == nir_agg.shape == blue_agg.shape:
        raise ValueError("input layers expected to have equal shapes")

    if not isinstance(c1, (float, int)):
        raise ValueError("c1 must be numeric")

    if not isinstance(c2, (float, int)):
        raise ValueError("c2 must be numeric")

    if soil_factor > 1.0 or soil_factor < -1.0:
        raise ValueError("soil factor must be between [-1.0, 1.0]")

    if gain < 0:
        raise ValueError("gain must be greater than 0")

    validate_arrays(nir_agg, red_agg, blue_agg)

    mapper = ArrayTypeFunctionMapping(numpy_func=_evi_cpu,
                                      dask_func=_evi_dask,
                                      cupy_func=_evi_cupy,
                                      dask_cupy_func=_evi_dask_cupy)

    out = mapper(red_agg)(nir_agg.data, red_agg.data, blue_agg.data, c1, c2,
                          soil_factor, gain)

    return DataArray(out,
                     name=name,
                     coords=nir_agg.coords,
                     dims=nir_agg.dims,
                     attrs=nir_agg.attrs)
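The printed values match the standard EVI formula, EVI = gain * (NIR - Red) / (NIR + c1*Red - c2*Blue + soil_factor), e.g. 2.5 * 192 / (1519 + 6*1327 - 7.5*1281 + 1) ~ -3.8247. A plain-NumPy reference (hypothetical `evi_sketch` helper):

import numpy as np


def evi_sketch(nir, red, blue, c1=6.0, c2=7.5, soil_factor=1.0, gain=2.5):
    nir, red, blue = (np.asarray(a, dtype=np.float64) for a in (nir, red, blue))
    return gain * (nir - red) / (nir + c1 * red - c2 * blue + soil_factor)


print(evi_sketch(1519., 1327., 1281.))  # ~ -3.8247, cf. evi_agg[100, 100] above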
Example #17
def true_color(r, g, b, nodata=1, c=10.0, th=0.125, name='true_color'):
    """
    Create true color composite from a combination of red, green and
    blue bands satellite images.

    A sigmoid function will be used to improve the contrast of output image.
    The function is defined as
    ``normalized_pixel = 1 / (1 + np.exp(c * (th - normalized_pixel)))``
    where ``c`` and ``th`` are contrast and brightness controlling parameters.

    Parameters
    ----------
    r : xarray.DataArray
        2D array of red band data.
    g : xarray.DataArray
        2D array of green band data.
    b : xarray.DataArray
        2D array of blue band data.
    nodata : int or float, default=1
        Nodata value of input DataArrays.
    c : float, default=10
        Contrast and brightness controlling parameter for output image.
    th : float, default=0.125
        Contrast and brightness controlling parameter for output image.
    name : str, default='true_color'
        Name of output DataArray.

    Returns
    -------
    true_color_agg : xarray.DataArray of the same type as inputs
        3D array true color image with dims of [y, x, band].
        All output attributes are copied from red band image.

    Examples
    --------
    .. plot::
       :include-source:

        >>> from xrspatial.datasets import get_data
        >>> data = get_data('sentinel-2')  # Open Example Data
        >>> red = data['Red']
        >>> green = data['Green']
        >>> blue = data['Blue']
        >>> from xrspatial.multispectral import true_color
        >>> # Generate true color image
        >>> true_color_img = true_color(r=red, g=green, b=blue)
        >>> true_color_img.plot.imshow()
    """

    mapper = ArrayTypeFunctionMapping(
        numpy_func=_true_color_numpy,
        dask_func=_true_color_dask,
        cupy_func=lambda *args: not_implemented_func(
            *args,
            messages=
            'true_color() does not support cupy backed DataArray',  # noqa
        ),
        dask_cupy_func=lambda *args: not_implemented_func(
            *args,
            messages=
            'true_color() does not support dask with cupy backed DataArray',  # noqa
        ),
    )
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        out = mapper(r)(r, g, b, nodata, c, th)

    # TODO: output metadata: coords, dims, attrs
    _dims = ['y', 'x', 'band']
    _attrs = r.attrs
    _coords = {'y': r['y'], 'x': r['x'], 'band': [0, 1, 2, 3]}

    return DataArray(
        out,
        name=name,
        dims=_dims,
        coords=_coords,
        attrs=_attrs,
    )
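A minimal sketch of the contrast stretch described in the docstring: normalize a band to [0, 1], then apply the sigmoid 1 / (1 + exp(c * (th - x))). The `stretch_band` helper is hypothetical; how the library scales to 8-bit values and builds the alpha band is not shown here.

import numpy as np


def stretch_band(band, c=10.0, th=0.125):
    x = band.astype(np.float64)
    x = x / np.nanmax(x)                     # normalize to [0, 1]
    x = 1.0 / (1.0 + np.exp(c * (th - x)))   # sigmoid contrast enhancement
    return (x * 255).astype(np.uint8)


band = np.array([[100., 500., 1000.], [2000., 3000., 4000.]])
print(stretch_band(band))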
Example #18
def ndmi(nir_agg: DataArray, swir1_agg: DataArray, name='ndmi'):
    """
    Computes Normalized Difference Moisture Index.
    Used to determine vegetation water content.

    Parameters
    ----------
    nir_agg : DataArray
        near-infrared band
        (Landsat 4-7: Band 4)
        (Landsat 8: Band 5)
    swir1_agg : DataArray
        shortwave infrared band
        (Landsat 4-7: Band 5)
        (Landsat 8: Band 6)
    name: str, optional (default ="ndmi")
        Name of output DataArray.

    Returns
    ----------
    xarray.DataArray
        2D array, of the same type as the input, of calculated ndmi values.
        All other input attributes are preserved.

    Notes:
    ----------
    Algorithm References:
    https://www.usgs.gov/land-resources/nli/landsat/normalized-difference-moisture-index

    Examples:
    ----------
    Imports
    >>> import numpy as np
    >>> import xarray as xr
    >>> import xrspatial

    Create Sample Band Data
    >>> np.random.seed(0)
    >>> nir_agg = xr.DataArray(np.random.rand(4,4), dims = ["lat", "lon"])
    >>> height, width = nir_agg.shape
    >>> _lat = np.linspace(0, height - 1, height)
    >>> _lon = np.linspace(0, width - 1, width)
    >>> nir_agg["lat"] = _lat
    >>> nir_agg["lon"] = _lon

    >>> np.random.seed(5)
    >>> swir1_agg = xr.DataArray(np.random.rand(4,4), dims = ["lat", "lon"])
    >>> height, width = swir1_agg.shape
    >>> _lat = np.linspace(0, height - 1, height)
    >>> _lon = np.linspace(0, width - 1, width)
    >>> swir1_agg["lat"] = _lat
    >>> swir1_agg["lon"] = _lon

    >>> print(nir_agg, swir1_agg)
    <xarray.DataArray (lat: 4, lon: 4)>
    array([[0.5488135 , 0.71518937, 0.60276338, 0.54488318],
           [0.4236548 , 0.64589411, 0.43758721, 0.891773  ],
           [0.96366276, 0.38344152, 0.79172504, 0.52889492],
           [0.56804456, 0.92559664, 0.07103606, 0.0871293 ]])
    Coordinates:
      * lat      (lat) float64 0.0 1.0 2.0 3.0
      * lon      (lon) float64 0.0 1.0 2.0 3.0
    <xarray.DataArray (lat: 4, lon: 4)>
    array([[0.22199317, 0.87073231, 0.20671916, 0.91861091],
           [0.48841119, 0.61174386, 0.76590786, 0.51841799],
           [0.2968005 , 0.18772123, 0.08074127, 0.7384403 ],
           [0.44130922, 0.15830987, 0.87993703, 0.27408646]])
    Coordinates:
      * lat      (lat) float64 0.0 1.0 2.0 3.0
      * lon      (lon) float64 0.0 1.0 2.0 3.0

    Create NDMI DataArray
    >>> data = xrspatial.multispectral.ndmi(nir_agg, swir1_agg)
    >>> print(data)
    <xarray.DataArray 'ndmi' (lat: 4, lon: 4)>
    array([[ 0.4239978 , -0.09807732,  0.48925604, -0.25536675],
           [-0.07099968,  0.02715428, -0.27280597,  0.26475493],
           [ 0.52906124,  0.34266992,  0.81491258, -0.16534329],
           [ 0.12556087,  0.70789018, -0.85060343, -0.51757753]])
    Coordinates:
      * lat      (lat) float64 0.0 1.0 2.0 3.0
      * lon      (lon) float64 0.0 1.0 2.0 3.0
    """

    validate_arrays(nir_agg, swir1_agg)

    mapper = ArrayTypeFunctionMapping(numpy_func=_normalized_ratio_cpu,
                                      dask_func=_run_normalized_ratio_dask,
                                      cupy_func=_run_normalized_ratio_cupy,
                                      dask_cupy_func=_run_normalized_ratio_dask_cupy)

    out = mapper(nir_agg)(nir_agg.data, swir1_agg.data)

    return DataArray(out,
                     name=name,
                     coords=nir_agg.coords,
                     dims=nir_agg.dims,
                     attrs=nir_agg.attrs)
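As with ndvi() and nbr(), the backends compute a normalized ratio, here NDMI = (NIR - SWIR1) / (NIR + SWIR1), which reproduces the docstring values. A plain-NumPy reference (hypothetical `ndmi_sketch` helper):

import numpy as np


def ndmi_sketch(nir, swir1):
    nir = np.asarray(nir, dtype=np.float64)
    swir1 = np.asarray(swir1, dtype=np.float64)
    return (nir - swir1) / (nir + swir1)


print(ndmi_sketch(0.5488135, 0.22199317))  # ~0.424, cf. data[0, 0] above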
Example #19
def ebbi(red_agg: xr.DataArray,
         swir_agg: xr.DataArray,
         tir_agg: xr.DataArray,
         name='ebbi'):
    """
    Computes Enhanced Built-Up and Bareness Index (EBBI) which allows
    for easily distinguishing between built-up and bare land areas.

    Parameters
    ----------
    red_agg : xr.DataArray
        2D array of red band data.
    swir_agg : xr.DataArray
        2D array of shortwave infrared band data.
    tir_agg: xr.DataArray
        2D array of thermal infrared band data.
    name: str, default='ebbi'
        Name of output DataArray.

    Returns
    -------
    ebbi_agg : xr.DataArray of same type as inputs
        2D array of ebbi values.
        All other input attributes are preserved.

    References
    ----------
        - rdrr: https://rdrr.io/cran/LSRS/man/EBBI.html

    Examples
    --------
    .. sourcecode:: python

        >>> # Imports
        >>> import numpy as np
        >>> import xarray as xr
        >>> from xrspatial.multispectral import ebbi
        >>> # Create Sample Band Data, RED band
        >>> np.random.seed(1)
        >>> red_agg = xr.DataArray(np.random.rand(4,4), dims = ["lat", "lon"])
        >>> height, width = red_agg.shape
        >>> _lat = np.linspace(0, height - 1, height)
        >>> _lon = np.linspace(0, width - 1, width)
        >>> red_agg["lat"] = _lat
        >>> red_agg["lon"] = _lon
        >>> # SWIR band
        >>> np.random.seed(5)
        >>> swir_agg = xr.DataArray(np.random.rand(4,4), dims = ["lat", "lon"])
        >>> height, width = swir_agg.shape
        >>> _lat = np.linspace(0, height - 1, height)
        >>> _lon = np.linspace(0, width - 1, width)
        >>> swir_agg["lat"] = _lat
        >>> swir_agg["lon"] = _lon
        >>> # TIR band
        >>> np.random.seed(6)
        >>> tir_agg = xr.DataArray(np.random.rand(4,4), dims = ["lat", "lon"])
        >>> height, width = tir_agg.shape
        >>> _lat = np.linspace(0, height - 1, height)
        >>> _lon = np.linspace(0, width - 1, width)
        >>> tir_agg["lat"] = _lat
        >>> tir_agg["lon"] = _lon

        >>> print(red_agg, swir_agg, tir_agg)
        <xarray.DataArray (lat: 4, lon: 4)>
        array([[4.17022005e-01, 7.20324493e-01, 1.14374817e-04, 3.02332573e-01], # noqa
                [1.46755891e-01, 9.23385948e-02, 1.86260211e-01, 3.45560727e-01], # noqa
                [3.96767474e-01, 5.38816734e-01, 4.19194514e-01, 6.85219500e-01], # noqa
                [2.04452250e-01, 8.78117436e-01, 2.73875932e-02, 6.70467510e-01]]) # noqa
        Coordinates:
            * lat      (lat) float64 0.0 1.0 2.0 3.0
            * lon      (lon) float64 0.0 1.0 2.0 3.0
        <xarray.DataArray (lat: 4, lon: 4)>
        array([[0.22199317, 0.87073231, 0.20671916, 0.91861091],
                [0.48841119, 0.61174386, 0.76590786, 0.51841799],
                [0.2968005 , 0.18772123, 0.08074127, 0.7384403 ],
                [0.44130922, 0.15830987, 0.87993703, 0.27408646]])
        Coordinates:
            * lat      (lat) float64 0.0 1.0 2.0 3.0
            * lon      (lon) float64 0.0 1.0 2.0 3.0
        <xarray.DataArray (lat: 4, lon: 4)>
        array([[0.89286015, 0.33197981, 0.82122912, 0.04169663],
                [0.10765668, 0.59505206, 0.52981736, 0.41880743],
                [0.33540785, 0.62251943, 0.43814143, 0.73588211],
                [0.51803641, 0.5788586 , 0.6453551 , 0.99022427]])
        Coordinates:
            * lat      (lat) float64 0.0 1.0 2.0 3.0
            * lon      (lon) float64 0.0 1.0 2.0 3.0

        >>> # Create EBBI DataArray
        >>> ebbi_agg = ebbi(red_agg, swir_agg, tir_agg)
        >>> print(ebbi_agg)
        <xarray.DataArray 'ebbi' (lat: 4, lon: 4)>
        array([[-2.43983486, -2.58194492,  3.97432599, -0.42291921],
                [-0.11444052,  0.96786363,  0.59269999,  0.42374096],
                [ 0.61379897, -0.23840436, -0.05598088,  0.95193251],
                [ 1.32393891,  0.41574839,  0.72484653, -0.80669034]])
        Coordinates:
            * lat      (lat) float64 0.0 1.0 2.0 3.0
            * lon      (lon) float64 0.0 1.0 2.0 3.0
    """

    validate_arrays(red_agg, swir_agg, tir_agg)

    mapper = ArrayTypeFunctionMapping(numpy_func=_ebbi_cpu,
                                      dask_func=_ebbi_dask,
                                      cupy_func=_ebbi_cupy,
                                      dask_cupy_func=_ebbi_dask_cupy)

    out = mapper(red_agg)(red_agg.data, swir_agg.data, tir_agg.data)

    return DataArray(out,
                     name=name,
                     coords=red_agg.coords,
                     dims=red_agg.dims,
                     attrs=red_agg.attrs)
Example #20
def focal_stats(agg,
                kernel,
                stats_funcs=[
                    'mean', 'max', 'min', 'range', 'std', 'var', 'sum'
                ]):
    """
    Calculates statistics of the values within a specified focal neighborhood
    for each pixel in an input raster. The statistics types are Mean, Maximum,
    Minimum, Range, Standard deviation, Variance and Sum.

    Parameters
    ----------
    agg : xarray.DataArray
        2D array of input values to be analysed. Can be a NumPy backed,
        Cupy backed, or Dask with NumPy backed DataArray.
    kernel : numpy.array
        2D array where values of 1 indicate the kernel.
    stats_funcs: list of string
        List of statistics types to be calculated.
        Default set to ['mean', 'max', 'min', 'range', 'std', 'var', 'sum'].

    Returns
    -------
    stats_agg : xarray.DataArray of same type as `agg`
        3D array with dimensions of `(stat, y, x)` and with values
        indicating the focal stats.

    Examples
    --------
    .. sourcecode:: python

        >>> import numpy as np
        >>> import xarray as xr
        >>> from xrspatial.convolution import circle_kernel
        >>> kernel = circle_kernel(1, 1, 1)
        >>> kernel
        array([[0., 1., 0.],
               [1., 1., 1.],
               [0., 1., 0.]])
        >>> data = np.array([
            [0, 0, 0, 0, 0, 0],
            [1, 1, 2, 2, 1, 1],
            [2, 2, 1, 1, 2, 2],
            [3, 3, 0, 0, 3, 3],
        ])
        >>> from xrspatial.focal import focal_stats
        >>> focal_stats(xr.DataArray(data), kernel, stats_funcs=['min', 'sum'])
        <xarray.DataArray 'focal_apply' (stats: 2, dim_0: 4, dim_1: 6)>
        array([[[0., 0., 0., 0., 0., 0.],
                [0., 0., 0., 0., 0., 0.],
                [1., 1., 0., 0., 1., 1.],
                [2., 0., 0., 0., 0., 2.]],
               [[1., 1., 2., 2., 1., 1.],
                [4., 6., 6., 6., 6., 4.],
                [8., 9., 6., 6., 9., 8.],
                [8., 8., 4., 4., 8., 8.]]])
        Coordinates:
          * stats    (stats) object 'min' 'sum'
        Dimensions without coordinates: dim_0, dim_1
    """
    # validate raster
    if not isinstance(agg, DataArray):
        raise TypeError("`agg` must be instance of DataArray")

    if agg.ndim != 2:
        raise ValueError("`agg` must be 2D")

    # Validate the kernel
    kernel = custom_kernel(kernel)

    mapper = ArrayTypeFunctionMapping(
        numpy_func=_focal_stats_cpu,
        cupy_func=_focal_stats_cupy,
        dask_func=_focal_stats_cpu,
        dask_cupy_func=lambda *args: not_implemented_func(
            *args, messages='focal_stats() does not support dask with cupy backed DataArray.'),
    )
    result = mapper(agg)(agg, kernel, stats_funcs)
    return result
Example #21
def apply(raster, kernel, func=_calc_mean, name='focal_apply'):
    """
    Returns custom function applied array using a user-created window.

    Parameters
    ----------
    raster : xarray.DataArray
        2D array of input values to be filtered. Can be a NumPy backed,
        or Dask with NumPy backed DataArray.
    kernel : numpy.ndarray
        2D array where values of 1 indicate the kernel.
    func : callable, default=xrspatial.focal._calc_mean
        Function which takes an input array and returns an array.

    Returns
    -------
    agg : xarray.DataArray of same type as `raster`
        2D aggregate array of filtered values.

    Examples
    --------
    Focal apply works with NumPy backed xarray DataArray.

    .. sourcecode:: python

        >>> import numpy as np
        >>> import xarray as xr
        >>> from xrspatial.convolution import circle_kernel
        >>> from xrspatial.focal import apply
        >>> data = np.arange(20, dtype=np.float64).reshape(4, 5)
        >>> raster = xr.DataArray(data, dims=['y', 'x'], name='raster')
        >>> print(raster)
        <xarray.DataArray 'raster' (y: 4, x: 5)>
        array([[ 0.,  1.,  2.,  3.,  4.],
               [ 5.,  6.,  7.,  8.,  9.],
               [10., 11., 12., 13., 14.],
               [15., 16., 17., 18., 19.]])
        Dimensions without coordinates: y, x
        >>> kernel = circle_kernel(2, 2, 3)
        >>> kernel
        array([[0., 1., 0.],
               [1., 1., 1.],
               [0., 1., 0.]])
        >>> # apply kernel mean by default
        >>> apply_mean_agg = apply(raster, kernel)
        >>> apply_mean_agg
        <xarray.DataArray 'focal_apply' (y: 4, x: 5)>
        array([[ 2.        ,  2.25   ,  3.25      ,  4.25      ,  5.33333333],
               [ 5.25      ,  6.     ,  7.        ,  8.        ,  8.75      ],
               [10.25      , 11.     , 12.        , 13.        , 13.75      ],
               [13.66666667, 14.75   , 15.75      , 16.75      , 17.        ]])
        Dimensions without coordinates: y, x

    Focal apply works with Dask with NumPy backed xarray DataArray.
    Note that if the input raster is a NumPy or Dask with NumPy backed
    data array, the applied function must be decorated with ``numba.jit``.
    xrspatial already provides the ``ngjit`` decorator, where
    ``ngjit = numba.jit(nopython=True, nogil=True)``.

    .. sourcecode:: python

    >>> from xrspatial.utils import ngjit
    >>> from xrspatial.convolution import custom_kernel
    >>> kernel = custom_kernel(np.array([
        [0, 1, 0],
        [0, 1, 1],
        [0, 1, 0],
    ]))
    >>> weight = np.array([
        [0, 0.5, 0],
        [0, 1, 0.5],
        [0, 0.5, 0],
    ])
    >>> @ngjit
    >>> def func(kernel_data):
    ...     weight = np.array([
    ...         [0, 0.5, 0],
    ...         [0, 1, 0.5],
    ...         [0, 0.5, 0],
    ...     ])
    ...     return np.nansum(kernel_data * weight)

    >>> import dask.array as da
    >>> data_da = da.from_array(np.ones((6, 4), dtype=np.float64), chunks=(3, 2))
    >>> raster_da = xr.DataArray(data_da, dims=['y', 'x'], name='raster_da')
    >>> print(raster_da)
    <xarray.DataArray 'raster_da' (y: 6, x: 4)>
    dask.array<array, shape=(6, 4), dtype=float64, chunksize=(3, 2), chunktype=numpy.ndarray>  # noqa
    Dimensions without coordinates: y, x
    >>> apply_func_agg = apply(raster_da, kernel, func)
    >>> print(apply_func_agg)
    <xarray.DataArray 'focal_apply' (y: 6, x: 4)>
    dask.array<_trim, shape=(6, 4), dtype=float64, chunksize=(3, 2), chunktype=numpy.ndarray>  # noqa
    Dimensions without coordinates: y, x
    >>> print(apply_func_agg.compute())
    <xarray.DataArray 'focal_apply' (y: 6, x: 4)>
    array([[2. , 2. , 2. , 1.5],
           [2.5, 2.5, 2.5, 2. ],
           [2.5, 2.5, 2.5, 2. ],
           [2.5, 2.5, 2.5, 2. ],
           [2.5, 2.5, 2.5, 2. ],
           [2. , 2. , 2. , 1.5]])
    Dimensions without coordinates: y, x
    """
    # validate raster
    if not isinstance(raster, DataArray):
        raise TypeError("`raster` must be instance of DataArray")

    if raster.ndim != 2:
        raise ValueError("`raster` must be 2D")

    # Validate the kernel
    kernel = custom_kernel(kernel)

    # apply kernel to raster values
    # if raster is a numpy or dask with numpy backed data array,
    # the function func must be a @ngjit
    mapper = ArrayTypeFunctionMapping(
        numpy_func=_apply_numpy,
        cupy_func=lambda *args: not_implemented_func(
            *args, messages='apply() does not support cupy backed DataArray.'),
        dask_func=_apply_dask_numpy,
        dask_cupy_func=lambda *args: not_implemented_func(
            *args,
            messages=
            'apply() does not support dask with cupy backed DataArray.'),
    )
    out = mapper(raster)(raster.data, kernel, func)
    result = DataArray(out,
                       name=name,
                       coords=raster.coords,
                       dims=raster.dims,
                       attrs=raster.attrs)
    return result
Example #22
def equal_interval(agg: xr.DataArray,
                   k: int = 5,
                   name: Optional[str] = 'equal_interval') -> xr.DataArray:
    """
    Reclassifies data for array `agg` into new values based on intervals
    of equal width.

    Parameters
    ----------
    agg : xarray.DataArray
        2D NumPy, CuPy, NumPy-backed Dask, or Cupy-backed Dask array
        of values to be reclassified.
    k : int, default=5
        Number of classes to be produced.
    name : str, default='equal_interval'
        Name of output aggregate.

    Returns
    -------
    equal_interval_agg : xarray.DataArray of the same type as `agg`
        2D aggregate array of equal interval allocations.
        All other input attributes are preserved.

    References
    ----------
        - PySAL: https://pysal.org/mapclassify/_modules/mapclassify/classifiers.html#EqualInterval # noqa
        - scikit-learn: https://scikit-learn.org/stable/auto_examples/classification/plot_classifier_comparison.html#sphx-glr-auto-examples-classification-plot-classifier-comparison-py # noqa

    Examples
    --------
    .. sourcecode:: python

        >>> import numpy as np
        >>> import xarray as xr
        >>> from xrspatial.classify import equal_interval
        >>> elevation = np.array([
            [np.nan,  1.,  2.,  3.,  4.],
            [ 5.,  6.,  7.,  8.,  9.],
            [10., 11., 12., 13., 14.],
            [15., 16., 17., 18., 19.],
            [20., 21., 22., 23., np.inf]
        ])
        >>> agg_numpy = xr.DataArray(elevation, attrs={'res': (10.0, 10.0)})
        >>> numpy_equal_interval = equal_interval(agg_numpy, k=5)
        >>> print(numpy_equal_interval)
        <xarray.DataArray 'equal_interval' (dim_0: 5, dim_1: 5)>
        array([[nan,  0.,  0.,  0.,  0.],
               [ 0.,  0.,  0.,  0.,  1.],
               [ 1.,  1.,  1.,  1.,  1.],
               [ 1.,  2.,  2.,  2.,  2.],
               [ 2.,  2.,  2.,  2., nan]], dtype=float32)
        Dimensions without coordinates: dim_0, dim_1
        Attributes:
            res:      (10.0, 10.0)
    """

    mapper = ArrayTypeFunctionMapping(
        numpy_func=lambda *args: _run_equal_interval(*args, module=np),
        dask_func=lambda *args: _run_equal_interval(*args, module=da),
        cupy_func=lambda *args: _run_equal_interval(*args, module=cupy),
        dask_cupy_func=lambda *args: not_implemented_func(
            *args, messages='equal_interval() does not support dask with cupy backed DataArray.'),  # noqa
    )
    out = mapper(agg)(agg, k)
    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
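An illustrative NumPy-only sketch of equal-interval classification: split the finite value range into k equally wide bins and label each cell with its bin index. The `equal_interval_sketch` helper is hypothetical; the `_run_equal_interval` backends are not shown here and handle more cases.

import numpy as np


def equal_interval_sketch(data, k):
    finite = data[np.isfinite(data)]
    edges = np.linspace(finite.min(), finite.max(), k + 1)[1:-1]  # k - 1 cuts
    out = np.searchsorted(edges, data).astype(np.float32)
    out[~np.isfinite(data)] = np.nan
    return out


data = np.arange(25, dtype=np.float64).reshape(5, 5)
print(equal_interval_sketch(data, k=5))  # classes 0..4, five cells each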
Example #23
def arvi(nir_agg: DataArray, red_agg: DataArray, blue_agg: DataArray,
         name='arvi'):
    """
    Computes Atmospherically Resistant Vegetation Index.
    Allows for molecular and ozone correction with no further
    need for aerosol correction, except for dust conditions.

    Parameters
    ----------
    nir_agg : DataArray
        near-infrared band data
    red_agg : DataArray
        red band data
    blue_agg : DataArray
        blue band data
    name: str, optional (default = "arvi")
        Name of output DataArray

    Returns
    ----------
    xarray.DataArray
        2D array, of the same type as the input, of calculated arvi values.
        All other input attributes are preserved.

    Notes:
    ----------
    Algorithm References:
    https://modis.gsfc.nasa.gov/sci_team/pubs/abstract_new.php?id=03667

    Examples:
    ----------
    Imports
    >>> import numpy as np
    >>> import xarray as xr
    >>> import xrspatial

    Create Sample Band Data
    >>> np.random.seed(0)
    >>> nir_agg = xr.DataArray(np.random.rand(4,4), dims = ["lat", "lon"])
    >>> height, width = nir_agg.shape
    >>> _lat = np.linspace(0, height - 1, height)
    >>> _lon = np.linspace(0, width - 1, width)
    >>> nir_agg["lat"] = _lat
    >>> nir_agg["lon"] = _lon

    >>> np.random.seed(1)
    >>> red_agg = xr.DataArray(np.random.rand(4,4), dims = ["lat", "lon"])
    >>> height, width = red_agg.shape
    >>> _lat = np.linspace(0, height - 1, height)
    >>> _lon = np.linspace(0, width - 1, width)
    >>> red_agg["lat"] = _lat
    >>> red_agg["lon"] = _lon

    >>> np.random.seed(2)
    >>> blue_agg = xr.DataArray(np.random.rand(4,4), dims = ["lat", "lon"])
    >>> height, width = blue_agg.shape
    >>> _lat = np.linspace(0, height - 1, height)
    >>> _lon = np.linspace(0, width - 1, width)
    >>> blue_agg["lat"] = _lat
    >>> blue_agg["lon"] = _lon

    >>> print(nir_agg, red_agg, blue_agg)
    <xarray.DataArray (lat: 4, lon: 4)>
    array([[0.5488135 , 0.71518937, 0.60276338, 0.54488318],
           [0.4236548 , 0.64589411, 0.43758721, 0.891773  ],
           [0.96366276, 0.38344152, 0.79172504, 0.52889492],
           [0.56804456, 0.92559664, 0.07103606, 0.0871293 ]])
    Coordinates:
      * lat      (lat) float64 0.0 1.0 2.0 3.0
      * lon      (lon) float64 0.0 1.0 2.0 3.0
     <xarray.DataArray (lat: 4, lon: 4)>
    array([[4.17022005e-01, 7.20324493e-01, 1.14374817e-04, 3.02332573e-01],
           [1.46755891e-01, 9.23385948e-02, 1.86260211e-01, 3.45560727e-01],
           [3.96767474e-01, 5.38816734e-01, 4.19194514e-01, 6.85219500e-01],
           [2.04452250e-01, 8.78117436e-01, 2.73875932e-02, 6.70467510e-01]])
    Coordinates:
      * lat      (lat) float64 0.0 1.0 2.0 3.0
      * lon      (lon) float64 0.0 1.0 2.0 3.0
      <xarray.DataArray (lat: 4, lon: 4)>
    array([[0.4359949 , 0.02592623, 0.54966248, 0.43532239],
           [0.4203678 , 0.33033482, 0.20464863, 0.61927097],
           [0.29965467, 0.26682728, 0.62113383, 0.52914209],
           [0.13457995, 0.51357812, 0.18443987, 0.78533515]])
    Coordinates:
      * lat      (lat) float64 0.0 1.0 2.0 3.0
      * lon      (lon) float64 0.0 1.0 2.0 3.0

    Create ARVI DataArray
    >>> data = xrspatial.multispectral.arvi(nir_agg, red_agg, blue_agg)
    >>> print(data)
    <xarray.DataArray 'arvi' (lat: 4, lon: 4)>
    array([[ 0.08288985, -0.32062735,  0.99960309,  0.23695335],
           [ 0.48395093,  0.68183958,  0.26579331,  0.37232558],
           [ 0.22839874, -0.24733151,  0.2551784 , -0.12864117],
           [ 0.26424862, -0.09922362,  0.64689773, -0.21165207]])
    Coordinates:
      * lat      (lat) float64 0.0 1.0 2.0 3.0
      * lon      (lon) float64 0.0 1.0 2.0 3.0
    """

    validate_arrays(red_agg, nir_agg, blue_agg)

    mapper = ArrayTypeFunctionMapping(numpy_func=_arvi_cpu,
                                      dask_func=_arvi_dask,
                                      cupy_func=_arvi_cupy,
                                      dask_cupy_func=_arvi_dask_cupy)

    out = mapper(red_agg)(nir_agg.data, red_agg.data, blue_agg.data)

    return DataArray(out,
                     name=name,
                     coords=nir_agg.coords,
                     dims=nir_agg.dims,
                     attrs=nir_agg.attrs)
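
The per-pixel arithmetic behind arvi() can be checked against the values printed in the docstring above: (NIR - 2*Red + Blue) / (NIR + 2*Red + Blue). The snippet below is a plain-NumPy sketch of that formula only; the helper name `arvi_numpy` is illustrative and leaves out the backend dispatch and array validation the library performs.

import numpy as np


def arvi_numpy(nir, red, blue):
    # Atmospherically Resistant Vegetation Index, per pixel.
    nir = np.asarray(nir, dtype=np.float64)
    red = np.asarray(red, dtype=np.float64)
    blue = np.asarray(blue, dtype=np.float64)
    return (nir - 2.0 * red + blue) / (nir + 2.0 * red + blue)


# Top-left cell of the docstring example:
# nir=0.5488135, red=0.41702200, blue=0.4359949 -> ~0.0829
print(arvi_numpy(0.5488135, 0.41702200, 0.4359949))
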
Exemplo n.º 24
0
def aspect(agg: xr.DataArray, name: Optional[str] = 'aspect') -> xr.DataArray:
    """
    Calculates the aspect value of an elevation aggregate.

    Calculates, for all cells in the array, the downward slope direction
    of each cell based on the elevation of its neighbors in a 3x3 grid.
    The value is measured clockwise in degrees, from 0 (due north) to 360
    (again due north). Values along the edges are not calculated.

    Direction of the aspect can be determined by its value:
    From 0     to 22.5:  North
    From 22.5  to 67.5:  Northeast
    From 67.5  to 112.5: East
    From 112.5 to 157.5: Southeast
    From 157.5 to 202.5: South
    From 202.5 to 247.5: Southwest
    From 247.5 to 292.5: West
    From 292.5 to 337.5: Northwest
    From 337.5 to 360:   North

    Note that values of -1 denote flat areas.

    Parameters
    ----------
    agg : xarray.DataArray
        2D NumPy, CuPy, or Dask with NumPy-backed xarray DataArray
        of elevation values.
    name : str, default='aspect'
        Name of output DataArray.

    Returns
    -------
    aspect_agg : xarray.DataArray of the same type as `agg`
        2D aggregate array of calculated aspect values.
        All other input attributes are preserved.

    References
    ----------
        - arcgis: http://desktop.arcgis.com/en/arcmap/10.3/tools/spatial-analyst-toolbox/how-aspect-works.htm#ESRI_SECTION1_4198691F8852475A9F4BC71246579FAA # noqa

    Examples
    --------
    Aspect works with NumPy backed xarray DataArray

    .. sourcecode:: python

        >>> import numpy as np
        >>> import xarray as xr
        >>> from xrspatial import aspect

        >>> data = np.array([
            [1, 1, 1, 1, 1],
            [1, 1, 1, 2, 0],
            [1, 1, 1, 0, 0],
            [4, 4, 9, 2, 4],
            [1, 5, 0, 1, 4],
            [1, 5, 0, 5, 5]
        ], dtype=np.float32)
        >>> raster = xr.DataArray(data, dims=['y', 'x'], name='raster')
        >>> print(raster)
        <xarray.DataArray 'raster' (y: 6, x: 5)>
        array([[1., 1., 1., 1., 1.],
               [1., 1., 1., 2., 0.],
               [1., 1., 1., 0., 0.],
               [4., 4., 9., 2., 4.],
               [1., 5., 0., 1., 4.],
               [1., 5., 0., 5., 5.]])
        Dimensions without coordinates: y, x
        >>> aspect_agg = aspect(raster)
        >>> print(aspect_agg)
        <xarray.DataArray 'aspect' (y: 6, x: 5)>
        array([[ nan,  nan        ,   nan       ,   nan       , nan],
               [ nan,  -1.        ,   225.      ,   135.      , nan],
               [ nan, 343.61045967,   8.97262661,  33.69006753, nan],
               [ nan, 307.87498365,  71.56505118,  54.46232221, nan],
               [ nan, 191.30993247, 144.46232221, 255.96375653, nan],
               [ nan,  nan        ,   nan       ,   nan       , nan]])
        Dimensions without coordinates: y, x

    Aspect works with Dask with NumPy backed xarray DataArray

    .. sourcecode:: python

        >>> import dask.array as da
        >>> data_da = da.from_array(data, chunks=(3, 3))
        >>> raster_da = xr.DataArray(data_da, dims=['y', 'x'], name='raster_da')
        >>> print(raster_da)
        <xarray.DataArray 'raster_da' (y: 6, x: 5)>
        dask.array<array, shape=(6, 5), dtype=float32, chunksize=(3, 3), chunktype=numpy.ndarray>
        Dimensions without coordinates: y, x
        >>> aspect_da = aspect(raster_da)
        >>> print(aspect_da)
        <xarray.DataArray 'aspect' (y: 6, x: 5)>
        dask.array<_trim, shape=(6, 5), dtype=float32, chunksize=(3, 3), chunktype=numpy.ndarray>
        Dimensions without coordinates: y, x
        >>> print(aspect_da.compute())  # compute the results
        <xarray.DataArray 'aspect' (y: 6, x: 5)>
        array([[ nan,  nan        ,   nan       ,   nan       , nan],
               [ nan,  -1.        ,   225.      ,   135.      , nan],
               [ nan, 343.61045967,   8.97262661,  33.69006753, nan],
               [ nan, 307.87498365,  71.56505118,  54.46232221, nan],
               [ nan, 191.30993247, 144.46232221, 255.96375653, nan],
               [ nan,  nan        ,   nan       ,   nan       , nan]])
        Dimensions without coordinates: y, x

    Aspect works with CuPy backed xarray DataArray.
    Make sure you have a GPU and CuPy installed to run this example.

    .. sourcecode:: python

        >>> import cupy
        >>> data_cupy = cupy.asarray(data)
        >>> raster_cupy = xr.DataArray(data_cupy, dims=['y', 'x'])
        >>> aspect_cupy = aspect(raster_cupy)
        >>> print(type(aspect_cupy.data))
        <class 'cupy.core.core.ndarray'>
        >>> print(aspect_cupy)
        <xarray.DataArray 'aspect' (y: 6, x: 5)>
        array([[       nan,       nan,        nan,        nan,        nan],
               [       nan,       -1.,       225.,       135.,        nan],
               [       nan, 343.61047,   8.972626,  33.690067,        nan],
               [       nan, 307.87497,  71.56505 ,  54.462322,        nan],
               [       nan, 191.30994, 144.46233 ,  255.96376,        nan],
               [       nan,       nan,        nan,        nan,        nan]],
              dtype=float32)
        Dimensions without coordinates: y, x
    """
    mapper = ArrayTypeFunctionMapping(
        numpy_func=_run_numpy,
        dask_func=_run_dask_numpy,
        cupy_func=_run_cupy,
        dask_cupy_func=lambda *args: not_implemented_func(
            *args, messages='aspect() does not support dask with cupy backed DataArray'),  # noqa
    )

    out = mapper(agg)(agg.data)

    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
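
The degree ranges listed in the aspect() docstring map directly onto a small compass lookup. The helper below is only an illustrative classifier built from those ranges (with -1 treated as flat); it is not part of xrspatial.

import numpy as np


def aspect_to_direction(deg):
    """Classify an aspect value in degrees into a compass direction label."""
    if deg == -1:
        return 'Flat'
    if np.isnan(deg):
        return 'Undefined'
    bounds = [22.5, 67.5, 112.5, 157.5, 202.5, 247.5, 292.5, 337.5, 360.0]
    labels = ['North', 'Northeast', 'East', 'Southeast', 'South',
              'Southwest', 'West', 'Northwest', 'North']
    for upper, label in zip(bounds, labels):
        if deg <= upper:
            return label
    return 'Undefined'


# Values taken from the aspect example output above.
print(aspect_to_direction(225.0))         # Southwest
print(aspect_to_direction(343.61045967))  # North
print(aspect_to_direction(-1))            # Flat
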
Exemplo n.º 25
0
def nbr2(swir1_agg: xr.DataArray, swir2_agg: xr.DataArray, name='nbr2'):
    """
    Computes Normalized Burn Ratio 2 (NBR2). "NBR2 modifies the Normalized
    Burn Ratio (NBR) to highlight water sensitivity in vegetation and may be
    useful in post-fire recovery studies." [1]_

    Parameters
    ----------
    swir1_agg : xr.DataArray
        2D array of shortwave infrared band data.
        (Sentinel 2: Band 11)
        (Landsat 4-7: Band 5)
        (Landsat 8: Band 6)
    swir2_agg : xr.DataArray
        2D array of shortwave infrared band data.
        (Landsat 4-7: Band 6)
        (Landsat 8: Band 7)
    name : str, default='nbr2'
        Name of output DataArray.

    Returns
    -------
    nbr2_agg : xr.DataArray of same type as inputs.
        2D array of nbr2 values.
        All other input attributes are preserved.

    Notes
    -----
    .. [1] https://www.usgs.gov/land-resources/nli/landsat/landsat-normalized-burn-ratio-2 # noqa

    Examples
    --------
    .. plot::
       :include-source:

        >>> from xrspatial.datasets import get_data
        >>> data = get_data('sentinel-2')  # Open Example Data
        >>> swir1 = data['SWIR1']
        >>> swir2 = data['SWIR2']
        >>> from xrspatial.multispectral import nbr2
        >>> # Generate NBR2 Aggregate Array
        >>> nbr2_agg = nbr2(swir1_agg=swir1, swir2_agg=swir2)
        >>> swir1.plot(aspect=2, size=4)
        >>> swir2.plot(aspect=2, size=4)
        >>> nbr2_agg.plot(aspect=2, size=4)

    .. sourcecode:: python

        >>> y1, x1, y2, x2 = 100, 100, 103, 104
        >>> print(swir1[y1:y2, x1:x2].data)
        [[2092. 2242. 2333. 2382.]
         [2017. 2150. 2303. 2344.]
         [2124. 2244. 2367. 2452.]]
        >>> print(swir2[y1:y2, x1:x2].data)
        [[1866. 1962. 2086. 2112.]
         [1811. 1900. 2012. 2041.]
         [1838. 1956. 2067. 2109.]]
        >>> print(nbr2_agg[y1:y2, x1:x2].data)
        [[0.05709954 0.06660324 0.055895   0.06008011]
         [0.053814   0.0617284  0.06743917 0.0690992 ]
         [0.07218576 0.06857143 0.067659   0.07520281]]
    """

    validate_arrays(swir1_agg, swir2_agg)

    mapper = ArrayTypeFunctionMapping(
        numpy_func=_normalized_ratio_cpu,
        dask_func=_run_normalized_ratio_dask,
        cupy_func=_run_normalized_ratio_cupy,
        dask_cupy_func=_run_normalized_ratio_dask_cupy,
    )

    out = mapper(swir1_agg)(swir1_agg.data, swir2_agg.data)

    return DataArray(out,
                     name=name,
                     coords=swir1_agg.coords,
                     dims=swir1_agg.dims,
                     attrs=swir1_agg.attrs)
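
nbr2(), ndvi(), and ndmi() all reduce to the same normalized-difference pattern, (a - b) / (a + b), applied to different band pairs; the mapper simply routes that computation to the backend matching the input array. The function below is my own minimal NumPy sketch of the core arithmetic (the name `normalized_difference` and the zero-sum guard are not xrspatial's); its output reproduces the top-left NBR2 cells printed above.

import numpy as np


def normalized_difference(a, b):
    """(a - b) / (a + b), with zero-sum cells mapped to NaN instead of raising."""
    a = np.asarray(a, dtype=np.float64)
    b = np.asarray(b, dtype=np.float64)
    denom = a + b
    out = np.full(denom.shape, np.nan)
    np.divide(a - b, denom, out=out, where=denom != 0)
    return out


swir1 = np.array([[2092., 2242.], [2017., 2150.]])
swir2 = np.array([[1866., 1962.], [1811., 1900.]])
print(normalized_difference(swir1, swir2))
# [[0.05709954 0.06660324]
#  [0.053814   0.0617284 ]]
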
Exemplo n.º 26
0
def slope(agg: xr.DataArray, name: str = 'slope') -> xr.DataArray:
    """
    Returns slope of input aggregate in degrees.

    Parameters
    ----------
    agg : xr.DataArray
        2D array of elevation data.
    name : str, default='slope'
        Name of output DataArray.

    Returns
    -------
    slope_agg : xr.DataArray of same type as `agg`
        2D array of slope values.
        All other input attributes are preserved.

    References
    ----------
        - arcgis: http://desktop.arcgis.com/en/arcmap/10.3/tools/spatial-analyst-toolbox/how-slope-works.htm # noqa

    Examples
    --------
    .. sourcecode:: python

        >>> import numpy as np
        >>> import xarray as xr
        >>> from xrspatial import slope
        >>> data = np.array([
        ...     [0, 0, 0, 0, 0],
        ...     [0, 0, 0, -1, 2],
        ...     [0, 0, 0, 0, 1],
        ...     [0, 0, 0, 5, 0]])
        >>> agg = xr.DataArray(data)
        >>> slope_agg = slope(agg)
        >>> slope_agg
        <xarray.DataArray 'slope' (dim_0: 4, dim_1: 5)>
        array([[      nan,       nan,       nan,       nan,       nan],
               [      nan,  0.      , 14.036243, 32.512516,       nan],
               [      nan,  0.      , 42.031113, 53.395725,       nan],
               [      nan,       nan,       nan,       nan,       nan]],
              dtype=float32)
        Dimensions without coordinates: dim_0, dim_1
    """

    cellsize_x, cellsize_y = get_dataarray_resolution(agg)
    mapper = ArrayTypeFunctionMapping(
        numpy_func=_run_numpy,
        cupy_func=_run_cupy,
        dask_func=_run_dask_numpy,
        dask_cupy_func=lambda *args: not_implemented_func(
            *args, messages='slope() does not support dask with cupy backed DataArray'),  # noqa
    )
    out = mapper(agg)(agg.data, cellsize_x, cellsize_y)

    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
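
The printed slope values above can be reproduced cell by cell with Horn's 3x3 finite-difference scheme. The function below is a single-cell sketch of that scheme, assuming unit cell sizes (as in the example, which has no `res` attribute) and ignoring the edge handling that produces the NaN border; it is not the library's vectorized implementation.

import numpy as np


def horn_slope_deg(window, cellsize_x=1.0, cellsize_y=1.0):
    """Slope in degrees at the centre of a 3x3 window, Horn's method."""
    a, b, c = window[0]
    d, e, f = window[1]
    g, h, i = window[2]
    dz_dx = ((c + 2 * f + i) - (a + 2 * d + g)) / (8 * cellsize_x)
    dz_dy = ((g + 2 * h + i) - (a + 2 * b + c)) / (8 * cellsize_y)
    return np.degrees(np.arctan(np.hypot(dz_dx, dz_dy)))


data = np.array([
    [0, 0, 0, 0, 0],
    [0, 0, 0, -1, 2],
    [0, 0, 0, 0, 1],
    [0, 0, 0, 5, 0]], dtype=np.float64)

# Windows centred on row 1, columns 2 and 3 of the docstring example:
print(horn_slope_deg(data[0:3, 1:4]))   # ~14.036243
print(horn_slope_deg(data[0:3, 2:5]))   # ~32.512516
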
Exemplo n.º 27
0
def generate_terrain(agg: xr.DataArray,
                     x_range: tuple = (0, 500),
                     y_range: tuple = (0, 500),
                     seed: int = 10,
                     zfactor: int = 4000,
                     full_extent: Optional[Union[Tuple, List]] = None,
                     name: str = 'terrain') -> xr.DataArray:
    """
    Generates a pseudo-random terrain which can be helpful for testing
    raster functions.

    Parameters
    ----------
    agg : xr.DataArray
        2D array whose shape and backing array type (NumPy, CuPy, or
        NumPy-backed Dask) define the output terrain grid.
    x_range : tuple, default=(0, 500)
        Range of x values.
    y_range : tuple, default=(0, 500)
        Range of y values.
    seed : int, default=10
        Seed for random number generator.
    zfactor : int, default=4000
        Multiplier for z values.
    full_extent : tuple or list, default=None
        bbox<xmin, ymin, xmax, ymax>. Full extent of coordinate system.
    name : str, default='terrain'
        Name of output DataArray.

    Returns
    -------
    terrain : xr.DataArray
        2D array of generated terrain values.

    References
    ----------
        - Michael McHugh: https://www.youtube.com/watch?v=O33YV4ooHSo
        - Red Blob Games: https://www.redblobgames.com/maps/terrain-from-noise/

    Examples
    --------
    .. plot::
       :include-source:

        >>> import numpy as np
        >>> import xarray as xr
        >>> from xrspatial import generate_terrain

        >>> W = 400
        >>> H = 300
        >>> data = np.zeros((H, W), dtype=np.float32)
        >>> raster = xr.DataArray(data, dims=['y', 'x'])
        >>> xrange = (-20e6, 20e6)
        >>> yrange = (-20e6, 20e6)
        >>> seed = 2
        >>> zfactor = 10

        >>> terrain = generate_terrain(raster, xrange, yrange, seed, zfactor)
        >>> terrain.plot.imshow()
    """

    height, width = agg.shape

    if full_extent is None:
        full_extent = (x_range[0], y_range[0], x_range[1], y_range[1])

    elif not isinstance(full_extent, (list, tuple)) or len(full_extent) != 4:
        raise TypeError('full_extent must be tuple(4)')

    full_xrange = (full_extent[0], full_extent[2])
    full_yrange = (full_extent[1], full_extent[3])

    x_range_scaled = (_scale(x_range[0], full_xrange, (0.0, 1.0)),
                      _scale(x_range[1], full_xrange, (0.0, 1.0)))

    y_range_scaled = (_scale(y_range[0], full_yrange, (0.0, 1.0)),
                      _scale(y_range[1], full_yrange, (0.0, 1.0)))

    mapper = ArrayTypeFunctionMapping(
        numpy_func=_terrain_numpy,
        cupy_func=_terrain_cupy,
        dask_func=_terrain_dask_numpy,
        dask_cupy_func=lambda *args: not_implemented_func(
            *args, messages='generate_terrain() does not support dask with cupy backed DataArray'),  # noqa
    )
    out = mapper(agg)(agg.data, seed, x_range_scaled, y_range_scaled, zfactor)
    canvas = ds.Canvas(plot_width=width,
                       plot_height=height,
                       x_range=x_range,
                       y_range=y_range)

    # DataArray coords were coming back different from cvs.points...
    hack_agg = canvas.points(pd.DataFrame({'x': [], 'y': []}), 'x', 'y')
    res = get_dataarray_resolution(hack_agg)
    result = xr.DataArray(out,
                          name=name,
                          coords=hack_agg.coords,
                          dims=hack_agg.dims,
                          attrs={'res': res})

    return result
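
_scale() is defined elsewhere in the module; from the way it is called here, with a value, a source range, and a (0.0, 1.0) target range, it is presumably a plain linear rescale. The snippet below is only a sketch under that assumption, not xrspatial's definition.

def _scale_sketch(value, src_range, dst_range):
    """Linearly map `value` from src_range onto dst_range (assumed behaviour)."""
    src_min, src_max = src_range
    dst_min, dst_max = dst_range
    t = (value - src_min) / (src_max - src_min)
    return dst_min + t * (dst_max - dst_min)


# Map x_range endpoints onto the unit interval of a wider full extent:
full_xrange = (-20e6, 20e6)
print(_scale_sketch(-20e6, full_xrange, (0.0, 1.0)))  # 0.0
print(_scale_sketch(0.0, full_xrange, (0.0, 1.0)))    # 0.5
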
Exemplo n.º 28
0
def ndvi(nir_agg: xr.DataArray, red_agg: xr.DataArray, name='ndvi'):
    """
    Computes Normalized Difference Vegetation Index (NDVI). Used to
    determine if a cell contains live green vegetation.

    Parameters
    ----------
    nir_agg : xr.DataArray
        2D array of near-infrared band data.
    red_agg : xr.DataArray
        2D array of red band data.
    name : str, default='ndvi'
        Name of output DataArray.

    Returns
    -------
    ndvi_agg : xarray.DataArray of same type as inputs
        2D array of ndvi values.
        All other input attributes are preserved.

    References
    ----------
        - Chris Holden: http://ceholden.github.io/open-geo-tutorial/python/chapter_2_indices.html # noqa

    Examples
    --------
    .. plot::
       :include-source:

        >>> from xrspatial.datasets import get_data
        >>> data = get_data('sentinel-2')  # Open Example Data
        >>> nir = data['NIR']
        >>> red = data['Red']
        >>> from xrspatial.multispectral import ndvi
        >>> # Generate NDVI Aggregate Array
        >>> ndvi_agg = ndvi(nir_agg=nir, red_agg=red)
        >>> nir.plot(aspect=2, size=4)
        >>> red.plot(aspect=2, size=4)
        >>> ndvi_agg.plot(aspect=2, size=4)

    .. sourcecode:: python

        >>> y1, x1, y2, x2 = 100, 100, 103, 104
        >>> print(nir[y1:y2, x1:x2].data)
        [[1519. 1504. 1530. 1589.]
         [1491. 1473. 1542. 1609.]
         [1479. 1461. 1592. 1653.]]
        >>> print(red[y1:y2, x1:x2].data)
        [[1327. 1329. 1363. 1392.]
         [1309. 1331. 1423. 1424.]
         [1293. 1337. 1455. 1414.]]
        >>> print(ndvi_agg[y1:y2, x1:x2].data)
        [[0.06746311 0.06177197 0.05772555 0.0660852 ]
         [0.065      0.05064194 0.04013491 0.06099571]
         [0.06709956 0.04431737 0.04496226 0.07792632]]
    """

    validate_arrays(nir_agg, red_agg)

    mapper = ArrayTypeFunctionMapping(
        numpy_func=_normalized_ratio_cpu,
        dask_func=_run_normalized_ratio_dask,
        cupy_func=_run_normalized_ratio_cupy,
        dask_cupy_func=_run_normalized_ratio_dask_cupy,
    )

    out = mapper(nir_agg)(nir_agg.data, red_agg.data)

    return DataArray(out,
                     name=name,
                     coords=nir_agg.coords,
                     dims=nir_agg.dims,
                     attrs=nir_agg.attrs)
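
As a quick sanity check, the first NDVI cell printed in the docstring follows directly from the standard (NIR - Red) / (NIR + Red) formula; the arithmetic below reproduces it with plain Python.

# Top-left cell of the docstring example: nir=1519, red=1327.
nir, red = 1519.0, 1327.0
ndvi_value = (nir - red) / (nir + red)
print(round(ndvi_value, 8))   # 0.06746311
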
Exemplo n.º 29
0
def curvature(agg: xr.DataArray,
              name: Optional[str] = 'curvature') -> xr.DataArray:
    """
    Calculates, for all cells in the array, the curvature (second
    derivative) of each cell based on the elevation of its neighbors
    in a 3x3 grid. A positive curvature indicates the surface is
    upwardly convex. A negative value indicates it is upwardly
    concave. A value of 0 indicates a flat surface.

    Units of the curvature output raster are one hundredth (1/100)
    of a z-unit.

    Parameters
    ----------
    agg : xarray.DataArray
        2D NumPy, CuPy, or NumPy-backed Dask xarray DataArray
        of elevation values.
        Must contain `res` attribute.
    name : str, default='curvature'
        Name of output DataArray.

    Returns
    -------
    curvature_agg : xarray.DataArray, of the same type as `agg`
        2D aggregate array of curvature values.
        All other input attributes are preserved.

    References
    ----------
        - arcgis: https://pro.arcgis.com/en/pro-app/latest/tool-reference/spatial-analyst/how-curvature-works.htm # noqa

    Examples
    --------
    Curvature works with NumPy backed xarray DataArray

    .. sourcecode:: python

        >>> import numpy as np
        >>> import dask.array as da
        >>> import xarray as xr
        >>> from xrspatial import curvature
        >>> flat_data = np.zeros((5, 5), dtype=np.float32)
        >>> flat_raster = xr.DataArray(flat_data, attrs={'res': (1, 1)})
        >>> flat_curv = curvature(flat_raster)
        >>> print(flat_curv)
        <xarray.DataArray 'curvature' (dim_0: 5, dim_1: 5)>
        array([[nan, nan, nan, nan, nan],
               [nan, -0., -0., -0., nan],
               [nan, -0., -0., -0., nan],
               [nan, -0., -0., -0., nan],
               [nan, nan, nan, nan, nan]])
        Dimensions without coordinates: dim_0, dim_1
        Attributes:
            res:      (1, 1)

    Curvature works with Dask with NumPy backed xarray DataArray

    .. sourcecode:: python

        >>> convex_data = np.array([
            [0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0],
            [0, 0, -1, 0, 0],
            [0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0]], dtype=np.float32)
        >>> convex_raster = xr.DataArray(
            da.from_array(convex_data, chunks=(3, 3)),
            attrs={'res': (10, 10)}, name='convex_dask_numpy_raster')
        >>> print(convex_raster)
        <xarray.DataArray 'convex_dask_numpy_raster' (dim_0: 5, dim_1: 5)>
        dask.array<array, shape=(5, 5), dtype=float32, chunksize=(3, 3), chunktype=numpy.ndarray>
        Dimensions without coordinates: dim_0, dim_1
        Attributes:
            res:      (10, 10)
        >>> convex_curv = curvature(convex_raster, name='convex_curvature')
        >>> print(convex_curv)  # return a xarray DataArray with Dask-backed array
        <xarray.DataArray 'convex_curvature' (dim_0: 5, dim_1: 5)>
        dask.array<_trim, shape=(5, 5), dtype=float32, chunksize=(3, 3), chunktype=numpy.ndarray>
        Dimensions without coordinates: dim_0, dim_1
        Attributes:
            res:      (10, 10)
        >>> print(convex_curv.compute())
        <xarray.DataArray 'convex_curvature' (dim_0: 5, dim_1: 5)>
        array([[nan, nan, nan, nan, nan],
               [nan, -0.,  1., -0., nan],
               [nan,  1., -4.,  1., nan],
               [nan, -0.,  1., -0., nan],
               [nan, nan, nan, nan, nan]])
        Dimensions without coordinates: dim_0, dim_1
        Attributes:
            res:      (10, 10)

    Curvature works with CuPy backed xarray DataArray.

    .. sourcecode:: python

        >>> import cupy
        >>> concave_data = np.array([
            [0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0],
            [0, 0, 1, 0, 0],
            [0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0]], dtype=np.float32)
        >>> concave_raster = xr.DataArray(
            cupy.asarray(concave_data),
            attrs={'res': (10, 10)}, name='concave_cupy_raster')
        >>> concave_curv = curvature(concave_raster)
        >>> print(type(concave_curv.data))
        <class 'cupy.core.core.ndarray'>
        >>> print(concave_curv)
        <xarray.DataArray 'curvature' (dim_0: 5, dim_1: 5)>
        array([[nan, nan, nan, nan, nan],
               [nan, -0., -1., -0., nan],
               [nan, -1.,  4., -1., nan],
               [nan, -0., -1., -0., nan],
               [nan, nan, nan, nan, nan]], dtype=float32)
        Dimensions without coordinates: dim_0, dim_1
        Attributes:
            res:      (10, 10)
    """

    cellsize_x, cellsize_y = get_dataarray_resolution(agg)
    cellsize = (cellsize_x + cellsize_y) / 2

    mapper = ArrayTypeFunctionMapping(
        numpy_func=_run_numpy,
        cupy_func=_run_cupy,
        dask_func=_run_dask_numpy,
        dask_cupy_func=lambda *args: not_implemented_func(
            *args, messages='curvature() does not support dask with cupy backed DataArray.'),  # noqa
    )
    out = mapper(agg)(agg.data, cellsize)
    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
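
The curvature values printed above follow the standard second-derivative formula on a 3x3 window, as described in the ArcGIS reference. The single-cell sketch below is my own illustration of that formula (edge handling and backend dispatch omitted); it reproduces the centre value of the CuPy example, 4, and the -1 of its direct neighbours for a unit peak with a 10-unit cell size.

import numpy as np


def curvature_cell(window, cellsize):
    """Curvature at the centre of a 3x3 window, in 1/100 of a z-unit."""
    z2 = window[0, 1]              # north neighbour
    z4, z5, z6 = window[1, :]      # west, centre, east
    z8 = window[2, 1]              # south neighbour
    d = ((z4 + z6) / 2 - z5) / cellsize ** 2
    e = ((z2 + z8) / 2 - z5) / cellsize ** 2
    return -2 * (d + e) * 100


concave_data = np.zeros((5, 5))
concave_data[2, 2] = 1.0
print(curvature_cell(concave_data[1:4, 1:4], cellsize=10))  # 4.0
print(curvature_cell(concave_data[1:4, 2:5], cellsize=10))  # -1.0
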
Exemplo n.º 30
0
def ndmi(nir_agg: xr.DataArray, swir1_agg: xr.DataArray, name='ndmi'):
    """
    Computes Normalized Difference Moisture Index. Used to determine
    vegetation water content.

    Parameters
    ----------
    nir_agg : xr.DataArray
        2D array of near-infrared band data.
        (Landsat 4-7: Band 4)
        (Landsat 8: Band 5)
    swir1_agg : xr.DataArray
        2D array of shortwave infrared band data.
        (Landsat 4-7: Band 5)
        (Landsat 8: Band 6)
    name: str, default='ndmi'
        Name of output DataArray.

    Returns
    -------
    ndmi_agg : xr.DataArray of same type as inputs
        2D array of ndmi values.
        All other input attributes are preserved.

    References
    ----------
        - USGS: https://www.usgs.gov/land-resources/nli/landsat/normalized-difference-moisture-index # noqa

    Examples
    --------
    .. plot::
       :include-source:

        >>> from xrspatial.datasets import get_data
        >>> data = get_data('sentinel-2')  # Open Example Data
        >>> nir = data['NIR']
        >>> swir1 = data['SWIR1']
        >>> from xrspatial.multispectral import ndmi
        >>> # Generate NDMI Aggregate Array
        >>> ndmi_agg = ndmi(nir_agg=nir, swir1_agg=swir1)
        >>> nir.plot(aspect=2, size=4)
        >>> swir1.plot(aspect=2, size=4)
        >>> ndmi_agg.plot(aspect=2, size=4)

    .. sourcecode:: python

        >>> y1, x1, y2, x2 = 100, 100, 103, 104
        >>> print(nir[y1:y2, x1:x2].data)
        [[1519. 1504. 1530. 1589.]
         [1491. 1473. 1542. 1609.]
         [1479. 1461. 1592. 1653.]]
        >>> print(swir1[y1:y2, x1:x2].data)
        [[2092. 2242. 2333. 2382.]
         [2017. 2150. 2303. 2344.]
         [2124. 2244. 2367. 2452.]]
        >>> print(ndmi_agg[y1:y2, x1:x2].data)
        [[-0.15868181 -0.19701014 -0.20786953 -0.1996978 ]
         [-0.149943   -0.18686172 -0.19791937 -0.18593474]
         [-0.17901748 -0.21133603 -0.19575651 -0.19464068]]
    """

    validate_arrays(nir_agg, swir1_agg)

    mapper = ArrayTypeFunctionMapping(
        numpy_func=_normalized_ratio_cpu,
        dask_func=_run_normalized_ratio_dask,
        cupy_func=_run_normalized_ratio_cupy,
        dask_cupy_func=_run_normalized_ratio_dask_cupy,
    )

    out = mapper(nir_agg)(nir_agg.data, swir1_agg.data)

    return DataArray(out,
                     name=name,
                     coords=nir_agg.coords,
                     dims=nir_agg.dims,
                     attrs=nir_agg.attrs)
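
ndmi() uses the same shared normalized-ratio backend as nbr2() and ndvi(); only the band pair changes. The top-left NDMI cell printed above checks out with the same (a - b) / (a + b) arithmetic.

# Same normalized-difference arithmetic as nbr2()/ndvi(), NIR vs. SWIR1.
nir, swir1 = 1519.0, 2092.0
print((nir - swir1) / (nir + swir1))   # ~ -0.15868181, matching the printed value
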