Example #1
def _apply(data, kernel, func):
    # numpy case
    if isinstance(data, np.ndarray):
        if not (issubclass(data.dtype.type, np.integer)
                or issubclass(data.dtype.type, np.floating)):
            raise ValueError("data type must be integer or float")

        out = _apply_numpy(data, kernel, func)

    # cupy case
    elif has_cuda() and isinstance(data, cupy.ndarray):
        if not (issubclass(data.dtype.type, cupy.integer)
                or issubclass(data.dtype.type, cupy.floating)):
            raise ValueError("data type must be integer or float")
        out = _apply_cupy(data, cupy.asarray(kernel), func)

    # dask + cupy case
    elif has_cuda() and isinstance(data, da.Array) and \
            type(data._meta).__module__.split('.')[0] == 'cupy':
        out = _apply_dask_cupy(data, cupy.asarray(kernel), func)

    # dask + numpy case
    elif isinstance(data, da.Array):
        out = _apply_dask_numpy(data, kernel, func)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(data)))

    return out
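The dispatcher above picks a backend-specific implementation from the array type. A rough, self-contained sketch of that pattern (illustrative only, with CuPy treated as optional; the helper name here is not part of xrspatial):

import numpy as np

try:
    import cupy
except ImportError:
    cupy = None


def apply_mean(data):
    # Route to the matching backend; NumPy is always available.
    if isinstance(data, np.ndarray):
        return np.nanmean(data)
    elif cupy is not None and isinstance(data, cupy.ndarray):
        return cupy.nanmean(data)
    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(data)))


print(apply_mean(np.arange(9, dtype='f4').reshape(3, 3)))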
Example #2
def convolve_2d(image, kernel, pad=True, use_cuda=True):
    """Function to call the 2D convolution via Numba.
    The Numba convolution function does not account for edges, so
    if we wish to take them into account, we pad the image array.
    """
    # Don't allow padding on (1, 1) kernel
    if (kernel.shape[0] == 1 and kernel.shape[1] == 1):
        pad = False

    if pad:
        pad_rows = kernel.shape[0] // 2
        pad_cols = kernel.shape[1] // 2
        pad_width = ((pad_rows, pad_rows), (pad_cols, pad_cols))
    else:
        # If padding is not desired, set pads to 0
        pad_rows = 0
        pad_cols = 0
        pad_width = 0

    padded_image = np.pad(image, pad_width=pad_width, mode="reflect")
    result = np.empty_like(padded_image)

    if has_cuda() and use_cuda:
        griddim, blockdim = cuda_args(padded_image.shape)
        _convolve_2d_cuda[griddim, blockdim](result, kernel, padded_image)
    else:
        result = _convolve_2d(kernel, padded_image)

    if pad:
        # Slice relative to the padded shape so a zero-width pad
        # (e.g. from a (1, N) kernel) does not produce an empty slice.
        result = result[pad_rows:result.shape[0] - pad_rows,
                        pad_cols:result.shape[1] - pad_cols]

    if result.shape != image.shape:
        raise ValueError("Output and input rasters are not the same shape.")

    return result
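A minimal usage sketch for convolve_2d, assuming it is importable from xrspatial.convolution (keyword arguments such as pad/use_cuda have varied between releases, so only positional arguments are used here):

import numpy as np
from xrspatial.convolution import convolve_2d

image = np.random.random((10, 10)).astype('f4')
kernel = np.ones((3, 3), dtype='f4') / 9.0   # simple mean filter

smoothed = convolve_2d(image, kernel)   # falls back to the CPU path without CUDA
print(smoothed.shape)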
Example #3
def slope(agg: xr.DataArray, name: str = 'slope') -> xr.DataArray:
    """Returns slope of input aggregate in degrees.

    Parameters
    ----------
    agg : xr.DataArray
    name : str - name property of output xr.DataArray

    Returns
    -------
    data: xr.DataArray

    Notes:
    ------
    Algorithm References:
     - http://desktop.arcgis.com/en/arcmap/10.3/tools/spatial-analyst-toolbox/how-slope-works.htm
     - Burrough, P. A., and McDonell, R. A., 1998.
      Principles of Geographical Information Systems
      (Oxford University Press, New York), pp 406
    """

    cellsize_x, cellsize_y = get_dataarray_resolution(agg)

    # numpy case
    if isinstance(agg.data, np.ndarray):
        out = _run_numpy(agg.data, cellsize_x, cellsize_y)

    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        out = _run_cupy(agg.data, cellsize_x, cellsize_y)

    # dask + cupy case
    elif has_cuda() and isinstance(agg.data, da.Array) and is_cupy_backed(agg):
        out = _run_dask_cupy(agg.data, cellsize_x, cellsize_y)
    
    # dask + numpy case
    elif isinstance(agg.data, da.Array):
        out = _run_dask_numpy(agg.data, cellsize_x, cellsize_y)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
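A short usage sketch for slope, assuming xrspatial is installed; the 'res' attribute and the y/x coordinates give get_dataarray_resolution something to infer the cell size from:

import numpy as np
import xarray as xr
from xrspatial import slope

elevation = xr.DataArray(np.random.random((5, 5)) * 100,
                         dims=['y', 'x'],
                         coords={'y': np.arange(5), 'x': np.arange(5)},
                         attrs={'res': 1})
slope_agg = slope(elevation)
print(slope_agg.name, slope_agg.shape)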
Example #4
def savi(nir_agg, red_agg, soil_factor=1.0, name='savi', use_cuda=True, use_cupy=True):
    """Returns Soil Adjusted Vegetation Index (SAVI).

    Parameters
    ----------
    nir_agg : DataArray
        near-infrared band data

    red_agg : DataArray
        red band data

    soil_factor : float
        soil adjustment factor between -1.0 and 1.0.
        When set to zero, savi will return the same as ndvi.

    Returns
    -------
    data: DataArray

    Notes:
    ------
    Algorithm References:
     - https://www.sciencedirect.com/science/article/abs/pii/003442578890106X
    """
    _check_is_dataarray(nir_agg, 'near-infrared')
    _check_is_dataarray(red_agg, 'red')

    if not red_agg.shape == nir_agg.shape:
        raise ValueError("red_agg and nir_agg expected to have equal shapes")

    if soil_factor > 1.0 or soil_factor < -1.0:
        raise ValueError("soil factor must be between (-1.0, 1.0)")

    nir_data = nir_agg.data
    red_data = red_agg.data

    if has_cuda() and use_cuda:
        griddim, blockdim = cuda_args(nir_data.shape)
        soil_factor_arr = np.array([float(soil_factor)], dtype='f4')

        out = np.empty(nir_data.shape, dtype='f4')
        out[:] = np.nan

        if use_cupy:
            import cupy
            out = cupy.asarray(out)

        _savi_gpu[griddim, blockdim](nir_data,
                                     red_data,
                                     soil_factor_arr,
                                     out)
    else:
        out = _savi(nir_agg.data, red_agg.data, soil_factor)

    return DataArray(out,
                     name=name,
                     coords=nir_agg.coords,
                     dims=nir_agg.dims,
                     attrs=nir_agg.attrs)
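A brief usage sketch, assuming the function above is available as xrspatial.multispectral.savi and that CUDA is absent (so the NumPy path runs):

import numpy as np
import xarray as xr
from xrspatial.multispectral import savi

shape = (4, 4)
nir = xr.DataArray(np.random.random(shape).astype('f4'), dims=['y', 'x'])
red = xr.DataArray(np.random.random(shape).astype('f4'), dims=['y', 'x'])

savi_agg = savi(nir, red, soil_factor=0.5)
print(savi_agg.name)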
Example #5
def hillshade(agg, azimuth=225, angle_altitude=25, name='hillshade'):
    """Illuminates 2D DataArray from specific azimuth and altitude.

    Parameters
    ----------
    agg : DataArray
    angle_altitude : int, optional (default: 25)
        Altitude angle of the sun specified in degrees.
    azimuth : int, optional (default: 225)
        The angle between the north vector and the perpendicular projection
        of the light source down onto the horizon specified in degrees.

    Returns
    -------
    data: xr.DataArray

    Notes:
    ------
    Algorithm References:
     - http://geoexamples.blogspot.com/2014/03/shaded-relief-images-using-gdal-python.html
    """

    # numpy case
    if isinstance(agg.data, np.ndarray):
        out = _run_numpy(agg.data, azimuth, angle_altitude)

    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        out = _run_cupy(agg.data, azimuth, angle_altitude)

    # dask + cupy case
    elif has_cuda() and isinstance(agg.data, da.Array) and is_cupy_backed(agg):
        out = _run_dask_cupy(agg.data, azimuth, angle_altitude)

    # dask + numpy case
    elif isinstance(agg.data, da.Array):
        out = _run_dask_numpy(agg.data, azimuth, angle_altitude)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
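A quick usage sketch, assuming hillshade is importable from xrspatial:

import numpy as np
import xarray as xr
from xrspatial import hillshade

terrain = xr.DataArray(np.random.random((6, 6)) * 1000, dims=['y', 'x'])
shaded = hillshade(terrain, azimuth=225, angle_altitude=25)
print(shaded.name, shaded.dims)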
Example #6
def aspect(agg: xr.DataArray, name: str = 'aspect'):
    """Returns downward slope direction in compass degrees (0 - 360) with 0 at 12 o'clock.

    Parameters
    ----------
    agg : DataArray

    Returns
    -------
    data: DataArray

    Notes:
    ------
    Algorithm References:
     - http://desktop.arcgis.com/en/arcmap/10.3/tools/spatial-analyst-toolbox/how-aspect-works.htm#ESRI_SECTION1_4198691F8852475A9F4BC71246579FAA
     - Burrough, P. A., and McDonell, R. A., 1998. Principles of Geographical Information Systems (Oxford University Press, New York), pp 406
    """

    # numpy case
    if isinstance(agg.data, np.ndarray):
        out = _run_numpy(agg.data)

    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        out = _run_cupy(agg.data)

    # dask + cupy case
    elif has_cuda() and isinstance(agg.data, da.Array) and is_cupy_backed(agg):
        out = _run_dask_cupy(agg.data)

    # dask + numpy case
    elif isinstance(agg.data, da.Array):
        out = _run_dask_numpy(agg.data)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
Example #7
def get_xr_dataarray(
    shape, type, different_each_call=False, seed=71942, is_int=False, include_nan=False
):
    # Gaussian bump with noise.
    #
    # Valid types are "numpy", "cupy" and "rtxpy". Using "numpy" will return
    # a numpy-backed xarray DataArray. Using either of the other two will
    # return a cupy-backed DataArray but only if the required dependencies are
    # available, otherwise a NotImplementedError will be raised so that the
    # benchmark will not be run.
    #
    # Calling with different_each_call=True will ensure that each array
    # returned by this function is different by randomly changing the last
    # element. This is required for functions that create an rtxpy
    # triangulation to avoid them reusing a cached triangulation leading to
    # optimistically fast benchmark times.
    ny, nx = shape

    x = np.linspace(-180, 180, nx)
    y = np.linspace(-90, 90, ny)
    x2, y2 = np.meshgrid(x, y)
    rng = np.random.default_rng(seed)

    if is_int:
        z = rng.integers(-nx, nx, size=shape).astype(np.float32)
    else:
        z = 100.0*np.exp(-x2**2 / 5e5 - y2**2 / 2e5)
        z += rng.normal(0.0, 2.0, (ny, nx))

    if different_each_call:
        if is_int:
            z[-1, -1] = np.random.default_rng().integers(-nx, nx)
        else:
            z[-1, -1] = np.random.default_rng().normal(0.0, 2.0)

    if include_nan:
        z[0, 0] = np.nan

    if type == "numpy":
        pass
    elif type == "cupy":
        if not (has_cuda() and has_cupy()):
            raise NotImplementedError()
        import cupy
        z = cupy.asarray(z)
    elif type == "rtxpy":
        if not has_rtx():
            raise NotImplementedError()
        import cupy
        z = cupy.asarray(z)
    else:
        raise RuntimeError(f"Unrecognised type {type}")

    return xr.DataArray(z, coords=dict(y=y, x=x), dims=["y", "x"])
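A brief usage sketch for the benchmark helper above; it is not a public xrspatial API, so this assumes the surrounding module's imports (np, xr, has_cuda, has_cupy, has_rtx) are in scope:

# Small numpy-backed benchmark array with a NaN planted in the corner.
agg = get_xr_dataarray((300, 500), "numpy", include_nan=True)
print(agg.shape, bool(np.isnan(agg.data[0, 0])))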
Example #8
def sipi(nir_agg, red_agg, blue_agg, name='sipi', use_cuda=True, use_cupy=True):
    """Computes Structure Insensitive Pigment Index which helpful
    in early disease detection

    Parameters
    ----------
    nir_agg : DataArray
        near-infrared band data

    red_agg : DataArray
        red band data

    blue_agg : DataArray
        blue band data

    Returns
    -------
    data: DataArray

    Notes:
    ------
    Algorithm References:
    https://en.wikipedia.org/wiki/Enhanced_vegetation_index
    """

    _check_is_dataarray(nir_agg, 'near-infrared')
    _check_is_dataarray(red_agg, 'red')
    _check_is_dataarray(blue_agg, 'blue')

    if not red_agg.shape == nir_agg.shape == blue_agg.shape:
        raise ValueError("input layers expected to have equal shapes")

    nir_data = nir_agg.data
    red_data = red_agg.data
    blue_data = blue_agg.data

    if has_cuda() and use_cuda:
        griddim, blockdim = cuda_args(nir_data.shape)
        out = np.empty(nir_data.shape, dtype='f4')
        out[:] = np.nan

        if use_cupy:
            import cupy
            out = cupy.asarray(out)

        _sipi_gpu[griddim, blockdim](nir_data,
                                     red_data,
                                     blue_data,
                                     out)
    else:
        out = _sipi(nir_data, red_data, blue_data)

    return DataArray(out,
                     name=name,
                     coords=nir_agg.coords,
                     dims=nir_agg.dims,
                     attrs=nir_agg.attrs)
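A usage sketch, assuming the function above is available as xrspatial.multispectral.sipi:

import numpy as np
import xarray as xr
from xrspatial.multispectral import sipi

shape = (4, 4)
nir = xr.DataArray(np.random.random(shape).astype('f4'), dims=['y', 'x'])
red = xr.DataArray(np.random.random(shape).astype('f4'), dims=['y', 'x'])
blue = xr.DataArray(np.random.random(shape).astype('f4'), dims=['y', 'x'])

sipi_agg = sipi(nir, red, blue)
print(sipi_agg.name)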
Example #9
def curvature(agg, name='curvature'):
    """Compute the curvature (second derivatives) of a agg surface.

    Parameters
    ----------
    agg: xarray.DataArray
        2D input agg image with shape=(height, width)

    Returns
    -------
    curvature: xarray.DataArray
        Curvature image with shape=(height, width)
    """

    cellsize_x, cellsize_y = get_dataarray_resolution(agg)
    cellsize = (cellsize_x + cellsize_y) / 2

    # numpy case
    if isinstance(agg.data, np.ndarray):
        out = _run_numpy(agg.data, cellsize)

    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        out = _run_cupy(agg.data, cellsize)

    # dask + cupy case
    elif has_cuda() and isinstance(agg.data, da.Array) and is_cupy_backed(agg):
        out = _run_dask_cupy(agg.data, cellsize)

    # dask + numpy case
    elif isinstance(agg.data, da.Array):
        out = _run_dask_numpy(agg.data, cellsize)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
Example #10
def _quantile(agg, k):
    # numpy case
    if isinstance(agg.data, np.ndarray):
        q = _run_cpu_quantile(agg.data, k)

    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        q = _run_cupy_quantile(agg.data, k)

    # dask + cupy case
    elif has_cuda() and isinstance(agg.data,
                                   da.Array) and is_cupy_backed(agg):
        q = _run_dask_cupy_quantile(agg.data, k)

    # dask + numpy case
    elif isinstance(agg.data, da.Array):
        q = _run_dask_numpy_quantile(agg.data, k)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    return q
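_quantile is a private dispatcher; in xrspatial it backs the public classify.quantile entry point. A minimal usage sketch of that public function, assuming the installed version exposes it:

import numpy as np
import xarray as xr
from xrspatial.classify import quantile

agg = xr.DataArray(np.random.random((4, 4)), dims=['y', 'x'])
quantile_agg = quantile(agg, k=4)
print(quantile_agg.values)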
Example #11
def _mean(data, excludes):
    # numpy case
    if isinstance(data, np.ndarray):
        out = _mean_numpy(data.astype(np.float64), excludes)

    # cupy case
    elif has_cuda() and isinstance(data, cupy.ndarray):
        out = _mean_cupy(data.astype(cupy.float64), excludes)

    # dask + cupy case
    elif has_cuda() and isinstance(data, da.Array) and \
            type(data._meta).__module__.split('.')[0] == 'cupy':
        out = _mean_dask_cupy(data.astype(cupy.float64), excludes)

    # dask + numpy case
    elif isinstance(data, da.Array):
        out = _mean_dask_numpy(data.astype(np.float64), excludes)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(data)))

    return out
Example #12
def ebbi(red_agg, swir_agg, tir_agg, name='ebbi', use_cuda=True, use_cupy=True):
    """Computes Enhanced Built-Up and Bareness Index
    Parameters
    ----------
    red_agg : DataArray
        red band data
    swir_agg : DataArray
        shortwave infrared band data
    tir_agg : DataArray
        thermal infrared band data
    Returns
    -------
    data: DataArray
    Notes:
    ------
    Algorithm References:
    https://rdrr.io/cran/LSRS/man/EBBI.html
    """

    _check_is_dataarray(red_agg, 'red')
    _check_is_dataarray(swir_agg, 'swir')
    _check_is_dataarray(tir_agg, 'thermal infrared')

    if not red_agg.shape == swir_agg.shape == tir_agg.shape:
        raise ValueError("input layers expected to have equal shapes")

    red_data = red_agg.data
    swir_data = swir_agg.data
    tir_data = tir_agg.data

    if has_cuda() and use_cuda:
        griddim, blockdim = cuda_args(red_data.shape)
        out = np.empty(red_data.shape, dtype='f4')
        out[:] = np.nan

        if use_cupy:
            import cupy
            out = cupy.asarray(out)

        _ebbi_gpu[griddim, blockdim](red_data,
                                     swir_data,
                                     tir_data,
                                     out)
    else:
        out = _ebbi(red_data, swir_data, tir_data)

    return DataArray(out,
                     name=name,
                     coords=red_agg.coords,
                     dims=red_agg.dims,
                     attrs=red_agg.attrs)
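A usage sketch, assuming the function above is available as xrspatial.multispectral.ebbi:

import numpy as np
import xarray as xr
from xrspatial.multispectral import ebbi

shape = (4, 4)
red = xr.DataArray(np.random.random(shape).astype('f4'), dims=['y', 'x'])
swir = xr.DataArray(np.random.random(shape).astype('f4'), dims=['y', 'x'])
tir = xr.DataArray(np.random.random(shape).astype('f4'), dims=['y', 'x'])

ebbi_agg = ebbi(red, swir, tir)
print(ebbi_agg.name)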
Example #13
def create_test_arr(backend='numpy'):
    W = 50
    H = 50
    data = np.zeros((H, W), dtype=np.float32)
    raster = xr.DataArray(data, dims=['y', 'x'])

    if has_cuda() and 'cupy' in backend:
        import cupy
        raster.data = cupy.asarray(raster.data)

    if 'dask' in backend:
        raster.data = da.from_array(raster.data, chunks=(10, 10))

    return raster
Example #14
def create_arr(data=None, H=10, W=10, backend='numpy'):
    assert (backend in ['numpy', 'cupy', 'dask'])
    if data is None:
        data = np.zeros((H, W), dtype=np.float32)
    raster = xr.DataArray(data, dims=['y', 'x'])

    if has_cuda() and 'cupy' in backend:
        import cupy
        raster.data = cupy.asarray(raster.data)

    if 'dask' in backend:
        import dask.array as da
        raster.data = da.from_array(raster.data, chunks=(10, 10))

    return raster
Example #15
def _bin(data, bins, new_values):
    # numpy case
    if isinstance(data, np.ndarray):
        out = _run_numpy_bin(data, np.asarray(bins), np.asarray(new_values))

    # cupy case
    elif has_cuda() and isinstance(data, cupy.ndarray):
        bins_cupy = cupy.asarray(bins, dtype='f4')
        new_values_cupy = cupy.asarray(new_values, dtype='f4')
        out = _run_cupy_bin(data, bins_cupy, new_values_cupy)

    # dask + cupy case
    elif has_cuda() and isinstance(data, da.Array) and \
            type(data._meta).__module__.split('.')[0] == 'cupy':
        bins_cupy = cupy.asarray(bins, dtype='f4')
        new_values_cupy = cupy.asarray(new_values, dtype='f4')
        out = _run_dask_cupy_bin(data, bins_cupy, new_values_cupy)

    # dask + numpy case
    elif isinstance(data, da.Array):
        out = _run_dask_numpy_bin(data, np.asarray(bins),
                                  np.asarray(new_values))

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(data)))

    return out
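For reference, the reclassification that _bin performs can be sketched with plain NumPy. This only illustrates the semantics (a value falling at or below bins[i] maps to new_values[i]); it is not the library's internal implementation:

import numpy as np

data = np.array([[0.1, 0.4], [0.6, 0.9]])
bins = np.array([0.25, 0.5, 1.0])        # upper bound of each bin
new_values = np.array([10, 20, 30])

# Index of the first bin whose upper bound is >= the value.
idx = np.digitize(data, bins, right=True)
print(new_values[idx])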
Example #16
def _run_normalized_ratio(arr1, arr2, use_cuda=True, use_cupy=True):

    if has_cuda() and use_cuda:
        griddim, blockdim = cuda_args(arr1.shape)
        out = np.empty(arr1.shape, dtype='f4')
        out[:] = np.nan

        if use_cupy:
            import cupy
            out = cupy.asarray(out)

        _normalized_ratio_gpu[griddim, blockdim](arr1, arr2, out)
    else:
        out = _normalized_ratio(arr1, arr2)
    
    return out
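The CPU fallback _normalized_ratio is not shown here. As an assumption, a standard normalized-difference computation (the NDVI-style formula (a - b) / (a + b)) would look like this in plain NumPy:

import numpy as np

def normalized_ratio(arr1, arr2):
    arr1 = arr1.astype('f4')
    arr2 = arr2.astype('f4')
    denom = arr1 + arr2
    # NaN wherever the denominator is zero.
    out = np.full(arr1.shape, np.nan, dtype='f4')
    np.divide(arr1 - arr2, denom, out=out, where=denom != 0)
    return out

nir = np.array([[0.5, 0.8]], dtype='f4')
red = np.array([[0.1, 0.2]], dtype='f4')
print(normalized_ratio(nir, red))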
Example #17
def create_test_arr(arr, backend='numpy'):

    y, x = arr.shape
    raster = xa.DataArray(arr, dims=['y', 'x'])

    if backend == 'numpy':
        raster['y'] = np.linspace(0, y, y)
        raster['x'] = np.linspace(0, x, x)
        return raster

    if has_cuda() and 'cupy' in backend:
        import cupy
        raster.data = cupy.asarray(raster.data)

    if 'dask' in backend:
        raster.data = da.from_array(raster.data, chunks=(3, 3))

    return raster
Example #18
def create_test_raster(data,
                       backend='numpy',
                       name='myraster',
                       dims=['y', 'x'],
                       attrs=None,
                       chunks=(3, 3)):
    raster = xr.DataArray(data, name=name, dims=dims, attrs=attrs)
    # set coords for test raster
    for i, dim in enumerate(dims):
        raster[dim] = np.linspace(0, data.shape[i] - 1, data.shape[i])

    if has_cuda() and 'cupy' in backend:
        import cupy
        raster.data = cupy.asarray(raster.data)

    if 'dask' in backend:
        raster.data = da.from_array(raster.data, chunks=chunks)

    return raster
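A quick usage sketch for the test helper above, assuming the surrounding module's imports (np, xr, da, has_cuda) are in scope:

data = np.arange(36, dtype='f4').reshape(6, 6)

numpy_raster = create_test_raster(data)                  # numpy-backed
dask_raster = create_test_raster(data, backend='dask')   # dask-backed
print(type(numpy_raster.data), type(dask_raster.data))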
Example #19
def hillshade(agg: xr.DataArray,
              azimuth: int = 225,
              angle_altitude: int = 25,
              name: Optional[str] = 'hillshade') -> xr.DataArray:
    """
    Calculates, for all cells in the array, an illumination
    value of each cell based on illumination from a specific
    azimuth and altitude.

    Parameters:
    ----------
    agg: xarray.DataArray
        2D array of elevation values:
        NumPy, CuPy, NumPy-backed Dask, or Cupy-backed Dask array.
    angle_altitude: int (default: 25)
        Altitude angle of the sun specified in degrees.
    azimuth: int (default: 225)
        The angle between the north vector and the perpendicular projection
        of the light source down onto the horizon specified in degrees.
    name: str, optional (default = "hillshade")
        Name of output DataArray.

    Returns:
    ----------
    data: xarray.DataArray
        2D array, of the same type as the input, of calculated illumination values.

    Notes:
    ----------
    Algorithm References:
        http://geoexamples.blogspot.com/2014/03/shaded-relief-images-using-gdal-python.html
        
    Examples:
    ----------
    Imports
    >>> import numpy as np
    >>> import xarray as xr
    >>> import xrspatial

    Create Initial DataArray
    >>> agg = xr.DataArray(np.array([[0, 1, 0, 0],
    >>>                              [1, 1, 0, 0],
    >>>                              [0, 1, 2, 2],
    >>>                              [1, 0, 2, 0],
    >>>                              [0, 2, 2, 2]]),
    >>>                       dims = ["lat", "lon"])
    >>> height, width = agg.shape
    >>> _lon = np.linspace(0, width - 1, width)
    >>> _lat = np.linspace(0, height - 1, height)
    >>> agg["lon"] = _lon
    >>> agg["lat"] = _lat
    >>> print(agg)
    <xarray.DataArray (lat: 5, lon: 4)>
    array([[0, 1, 0, 0],
           [1, 1, 0, 0],
           [0, 1, 2, 2],
           [1, 0, 2, 0],
           [0, 2, 2, 2]])
    Coordinates:
      * lon      (lon) float64 0.0 1.0 2.0 3.0
      * lat      (lat) float64 0.0 1.0 2.0 3.0 4.0

    Create Hillshade DataArray
    >>> hillshade = xrspatial.hillshade(agg)
    >>> print(hillshade)
    <xarray.DataArray 'hillshade' (lat: 5, lon: 4)>
    array([[       nan,        nan,        nan,        nan],
           [       nan, 0.54570079, 0.32044456,        nan],
           [       nan, 0.96130094, 0.53406336,        nan],
           [       nan, 0.67253318, 0.71130913,        nan],
           [       nan,        nan,        nan,        nan]])
    Coordinates:
      * lon      (lon) float64 0.0 1.0 2.0 3.0
      * lat      (lat) float64 0.0 1.0 2.0 3.0 4.0
    """

    # numpy case
    if isinstance(agg.data, np.ndarray):
        out = _run_numpy(agg.data, azimuth, angle_altitude)

    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        out = _run_cupy(agg.data, azimuth, angle_altitude)

    # dask + cupy case
    elif has_cuda() and isinstance(agg.data, da.Array) and is_cupy_backed(agg):
        out = _run_dask_cupy(agg.data, azimuth, angle_altitude)

    # dask + numpy case
    elif isinstance(agg.data, da.Array):
        out = _run_dask_numpy(agg.data, azimuth, angle_altitude)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
Example #20
def slope(agg: xr.DataArray,
          name: str = 'slope') -> xr.DataArray:
    """
    Returns slope of input aggregate in degrees.
    
    Parameters:
    ---------
    agg: xarray.DataArray
        2D array of elevation band data.
    name: str, optional (default = 'slope')
        name property of output xarray.DataArray

    Returns:
    ---------
    xarray.DataArray
        2D array, of the same type as the input, of calculated slope values.
        All other input attributes are preserved.

    Notes:
    ---------
    Algorithm References:
        - http://desktop.arcgis.com/en/arcmap/10.3/tools/spatial-analyst-toolbox/how-slope-works.htm
        - Burrough, P. A., and McDonell, R. A., 1998.
          Principles of Geographical Information Systems
          (Oxford University Press, New York), pp 406
    Examples:
    ---------
    Imports
    >>> import numpy as np
    >>> import xarray as xr
    >>> from xrspatial import slope

    Create Data Array
    >>> agg = xr.DataArray(np.array([[0, 0, 0, 0, 0, 0, 0],
    >>>                              [0, 0, 2, 4, 0, 8, 0],
    >>>                              [0, 2, 2, 4, 6, 8, 0],
    >>>                              [0, 4, 4, 4, 6, 8, 0],
    >>>                              [0, 6, 6, 6, 6, 8, 0],
    >>>                              [0, 8, 8, 8, 8, 8, 0],
    >>>                              [0, 0, 0, 0, 0, 0, 0]]),
    >>>                     dims = ["lat", "lon"],
    >>>                     attrs = dict(res = 1))
    >>> height, width = agg.shape
    >>> _lon = np.linspace(0, width - 1, width)
    >>> _lat = np.linspace(0, height - 1, height)
    >>> agg["lon"] = _lon
    >>> agg["lat"] = _lat

    Create Slope Data Array
    >>> print(slope(agg))
    <xarray.DataArray 'slope' (lat: 7, lon: 7)>
    array([[ 0,  0,  0,  0,  0,  0,  0],
           [ 0, 46, 60, 63, 73, 70,  0],
           [ 0, 60, 54, 54, 68, 67,  0],
           [ 0, 68, 60, 54, 60, 71,  0],
           [ 0, 73, 63, 60, 54, 72,  0],
           [ 0, 74, 71, 71, 72, 75,  0],
           [ 0,  0,  0,  0,  0,  0,  0]])
    Coordinates:
      * lon      (lon) float64 0.0 1.0 2.0 3.0 4.0 5.0 6.0
      * lat      (lat) float64 0.0 1.0 2.0 3.0 4.0 5.0 6.0
    Attributes:
        res:      1
    """

    cellsize_x, cellsize_y = get_dataarray_resolution(agg)

    # numpy case
    if isinstance(agg.data, np.ndarray):
        out = _run_numpy(agg.data, cellsize_x, cellsize_y)

    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        out = _run_cupy(agg.data, cellsize_x, cellsize_y)

    # dask + cupy case
    elif has_cuda() and is_dask_cupy(agg):
        out = _run_dask_cupy(agg.data, cellsize_x, cellsize_y)

    # dask + numpy case
    elif isinstance(agg.data, da.Array):
        out = _run_dask_numpy(agg.data, cellsize_x, cellsize_y)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
Example #21
def aspect(agg: xr.DataArray, name: Optional[str] = 'aspect') -> xr.DataArray:
    """
    Calculates the aspect value of an elevation aggregate.

    Calculates, for all cells in the array, the downward slope direction
    of each cell based on the elevation of its neighbors in a 3x3 grid.
    The value is measured clockwise in degrees with 0 and 360 at due
    north. Flat areas are given a value of -1. Values along the edges
    are not calculated.

    Parameters
    ----------
    agg : xarray.DataArray
        2D NumPy, CuPy, NumPy-backed Dask, or Cupy-backed Dask array
        of elevation values.
    name : str, default='aspect'
        Name of output DataArray.

    Returns
    -------
    aspect_agg : xarray.DataArray of the same type as `agg`
        2D aggregate array of calculated aspect values.
        All other input attributes are preserved.

    References
    ----------
        - arcgis: http://desktop.arcgis.com/en/arcmap/10.3/tools/spatial-analyst-toolbox/how-aspect-works.htm#ESRI_SECTION1_4198691F8852475A9F4BC71246579FAA # noqa

    Examples
    --------
    .. plot::
       :include-source:

        import datashader as ds
        import matplotlib.pyplot as plt
        from xrspatial import generate_terrain, aspect

        # Create Canvas
        W = 500
        H = 300
        cvs = ds.Canvas(plot_width = W,
                        plot_height = H,
                        x_range = (-20e6, 20e6),
                        y_range = (-20e6, 20e6))

        # Generate Example Terrain
        terrain_agg = generate_terrain(canvas = cvs)

        # Edit Attributes
        terrain_agg = terrain_agg.assign_attrs(
            {
                'Description': 'Example Terrain',
                'units': 'km',
                'Max Elevation': '4000',
            }
        )
        
        terrain_agg = terrain_agg.rename({'x': 'lon', 'y': 'lat'})
        terrain_agg = terrain_agg.rename('Elevation')

        # Create Aspect Aggregate Array
        aspect_agg = aspect(agg = terrain_agg, name = 'Aspect')

        # Edit Attributes
        aspect_agg = aspect_agg.assign_attrs(
            {
                'Description': 'Example Aspect',
                'units': 'deg',
            }
        )

        # Plot Terrain
        terrain_agg.plot(cmap = 'terrain', aspect = 2, size = 4)
        plt.title("Terrain")
        plt.ylabel("latitude")
        plt.xlabel("longitude")

        # Plot Aspect
        aspect_agg.plot(aspect = 2, size = 4)
        plt.title("Aspect")
        plt.ylabel("latitude")
        plt.xlabel("longitude")

    .. sourcecode:: python

        >>> print(terrain_agg[200:203, 200:202])
        <xarray.DataArray 'Elevation' (lat: 3, lon: 2)>
        array([[1264.02249454, 1261.94748873],
               [1285.37061171, 1282.48046696],
               [1306.02305679, 1303.40657515]])
        Coordinates:
          * lon      (lon) float64 -3.96e+06 -3.88e+06
          * lat      (lat) float64 6.733e+06 6.867e+06 7e+06
        Attributes:
            res:            1
            Description:    Example Terrain
            units:          km
            Max Elevation:  4000

    .. sourcecode:: python

        >>> print(aspect_agg[200:203, 200:202])
        <xarray.DataArray 'Aspect' (lat: 3, lon: 2)>
        array([[ 8.18582638,  8.04675084],
               [ 5.49302641,  9.86625477],
               [12.04270534, 16.87079619]])
        Coordinates:
          * lon      (lon) float64 -3.96e+06 -3.88e+06
          * lat      (lat) float64 6.733e+06 6.867e+06 7e+06
        Attributes:
            res:            1
            Description:    Example Aspect
            units:          deg
            Max Elevation:  4000
    """
    # numpy case
    if isinstance(agg.data, np.ndarray):
        out = _run_numpy(agg.data)

    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        out = _run_cupy(agg.data)

    # dask + cupy case
    elif has_cuda() and isinstance(agg.data, da.Array) and is_cupy_backed(agg):
        out = _run_dask_cupy(agg.data)

    # dask + numpy case
    elif isinstance(agg.data, da.Array):
        out = _run_dask_numpy(agg.data)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
Example #22
def hillshade(agg: xr.DataArray,
              azimuth: int = 225,
              angle_altitude: int = 25,
              name: Optional[str] = 'hillshade') -> xr.DataArray:
    """
    Calculates, for all cells in the array, an illumination value of
    each cell based on illumination from a specific azimuth and
    altitude.

    Parameters
    ----------
    agg : xarray.DataArray
        2D NumPy, CuPy, NumPy-backed Dask, or Cupy-backed Dask array
        of elevation values.
    altitude : int, default=25
        Altitude angle of the sun specified in degrees.
    azimuth : int, default=225
        The angle between the north vector and the perpendicular
        projection of the light source down onto the horizon
        specified in degrees.
    name : str, default='hillshade'
        Name of output DataArray.

    Returns
    -------
    hillshade_agg : xarray.DataArray, of same type as `agg`
        2D aggregate array of illumination values.

    References
    ----------
        - GeoExamples: http://geoexamples.blogspot.com/2014/03/shaded-relief-images-using-gdal-python.html # noqa

    Examples
    --------
    .. plot::
       :include-source:

        import datashader as ds
        import matplotlib.pyplot as plt
        from xrspatial import generate_terrain, hillshade

        # Create Canvas
        W = 500
        H = 300
        cvs = ds.Canvas(plot_width = W,
                        plot_height = H,
                        x_range = (-20e6, 20e6),
                        y_range = (-20e6, 20e6))

        # Generate Example Terrain
        terrain_agg = generate_terrain(canvas = cvs)

        # Edit Attributes
        terrain_agg = terrain_agg.assign_attrs(
            {
                'Description': 'Example Terrain',
                'units': 'km',
                'Max Elevation': '4000',
            }
        )
        
        terrain_agg = terrain_agg.rename({'x': 'lon', 'y': 'lat'})
        terrain_agg = terrain_agg.rename('Elevation')

        # Create Hillshade Aggregate Array
        hillshade_agg = hillshade(agg = terrain_agg, name = 'Illumination')

        # Edit Attributes
        hillshade_agg = hillshade_agg.assign_attrs({'Description': 'Example Hillshade',
                                                    'units': ''})

        # Plot Terrain
        terrain_agg.plot(cmap = 'terrain', aspect = 2, size = 4)
        plt.title("Terrain")
        plt.ylabel("latitude")
        plt.xlabel("longitude")

        # Plot Terrain
        hillshade_agg.plot(cmap = 'Greys', aspect = 2, size = 4)
        plt.title("Hillshade")
        plt.ylabel("latitude")
        plt.xlabel("longitude")

    .. sourcecode:: python

        >>> print(terrain_agg[200:203, 200:202])
        <xarray.DataArray 'Elevation' (lat: 3, lon: 2)>
        array([[1264.02249454, 1261.94748873],
               [1285.37061171, 1282.48046696],
               [1306.02305679, 1303.40657515]])
        Coordinates:
          * lon      (lon) float64 -3.96e+06 -3.88e+06
          * lat      (lat) float64 6.733e+06 6.867e+06 7e+06
        Attributes:
            res:            1
            Description:    Example Terrain
            units:          km
            Max Elevation:  4000

        >>> print(hillshade_agg[200:203, 200:202])
        <xarray.DataArray 'Illumination' (lat: 3, lon: 2)>
        array([[1264.02249454, 1261.94748873],
               [1285.37061171, 1282.48046696],
               [1306.02305679, 1303.40657515]])
        Coordinates:
          * lon      (lon) float64 -3.96e+06 -3.88e+06
          * lat      (lat) float64 6.733e+06 6.867e+06 7e+06
        Attributes:
            res:            1
            Description:    Example Hillshade
            units:
            Max Elevation:  4000
    """
    # numpy case
    if isinstance(agg.data, np.ndarray):
        out = _run_numpy(agg.data, azimuth, angle_altitude)

    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        out = _run_cupy(agg.data, azimuth, angle_altitude)

    # dask + cupy case
    elif has_cuda() and isinstance(agg.data, da.Array) and is_cupy_backed(agg):
        out = _run_dask_cupy(agg.data, azimuth, angle_altitude)

    # dask + numpy case
    elif isinstance(agg.data, da.Array):
        out = _run_dask_numpy(agg.data, azimuth, angle_altitude)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
Example #23
def aspect(agg: xr.DataArray, name: Optional[str] = 'aspect') -> xr.DataArray:
    """
    Calculates, for all cells in the array,
    the downward slope direction of each cell
    based on the elevation of its neighbors in a 3x3 grid.
    The value is measured clockwise in degrees with 0 and 360 at due north.
    Flat areas are given a value of -1.
    Values along the edges are not calculated.

    Parameters:
    ----------
    agg: xarray.DataArray
        2D array of elevation values. NumPy, CuPy, NumPy-backed Dask,
        or Cupy-backed Dask array.
    name: str, optional (default = "aspect")
        Name of output DataArray.

    Returns:
    ----------
    xarray.DataArray
        2D array, of the same type as the input, of calculated aspect values.
        All other input attributes are preserved.

    Notes:
    ----------
    Algorithm References:
        - http://desktop.arcgis.com/en/arcmap/10.3/tools/spatial-analyst-toolbox/how-aspect-works.htm#ESRI_SECTION1_4198691F8852475A9F4BC71246579FAA
        - Burrough, P. A., and McDonell, R. A., 1998.
          Principles of Geographical Information Systems
          (Oxford University Press, New York), pp 406

    Examples:
    ----------
    Imports
    >>> import numpy as np
    >>> import xarray as xr
    >>> import xrspatial

    Create Elevation DataArray
    >>> agg = xr.DataArray(np.array([[0, 1, 0, 0],
    >>>                              [1, 1, 0, 0],
    >>>                              [0, 1, 2, 2],
    >>>                              [1, 0, 2, 0],
    >>>                              [0, 2, 2, 2]]),
    >>>                    dims = ["lat", "lon"])
    >>> height, width = agg.shape
    >>> _lon = np.linspace(0, width - 1, width)
    >>> _lat = np.linspace(0, height - 1, height)
    >>> agg["lon"] = _lon
    >>> agg["lat"] = _lat

    Create Aspect DataArray
    >>> aspect = xrspatial.aspect(agg)
    >>> print(aspect)
    <xarray.DataArray 'aspect' (lat: 5, lon: 4)>
    array([[nan,  nan,  nan,  nan],
           [nan,   0.,  18.43494882,  nan],
           [nan, 270., 341.56505118,  nan],
           [nan, 288.43494882, 315.,  nan],
           [nan,  nan,  nan,  nan]])
    Coordinates:
    * lon      (lon) float64 0.0 1.0 2.0 3.0
    * lat      (lat) float64 0.0 1.0 2.0 3.0 4.0

    Terrain Example: https://makepath.github.io/xarray-spatial/assets/examples/user-guide.html
    """

    # numpy case
    if isinstance(agg.data, np.ndarray):
        out = _run_numpy(agg.data)

    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        out = _run_cupy(agg.data)

    # dask + cupy case
    elif has_cuda() and isinstance(agg.data, da.Array) and is_cupy_backed(agg):
        out = _run_dask_cupy(agg.data)

    # dask + numpy case
    elif isinstance(agg.data, da.Array):
        out = _run_dask_numpy(agg.data)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
Example #24
def convolve_2d(data, kernel):
    """
    Calculates, for all inner cells of an array, the 2D convolution of
    each cell via Numba. To account for edge cells, a pad can be added
    to the image array. Convolution is frequently used for image
    processing, such as smoothing, sharpening, and edge detection of
    images by eliminating spurious data or enhancing features in the
    data.

    Parameters
    ----------
    data : numpy.ndarray, cupy.ndarray or dask.array.Array
        2D array of values to be processed.
    kernel : array-like object
        Impulse kernel, determines area to apply impulse function for
        each cell.

    Returns
    -------
    convolve_agg : numpy.ndarray
        2D array representation of the impulse function.

    Examples
    --------
    .. plot::
       :include-source:

        import numpy as np
        import xarray as xr
        from xrspatial import focal
        from xrspatial.convolution import convolve_2d

        # Create Data Array
        agg = xr.DataArray(np.array([[0, 0, 0, 0, 0, 0, 0],
                                     [0, 0, 2, 4, 0, 8, 0],
                                     [0, 2, 2, 4, 6, 8, 0],
                                     [0, 4, 4, 4, 6, 8, 0],
                                     [0, 6, 6, 6, 6, 8, 0],
                                     [0, 8, 8, 8, 8, 8, 0],
                                     [0, 0, 0, 0, 0, 0, 0]]),
                            dims = ["lat", "lon"],
                            attrs = dict(res = 1))
        height, width = agg.shape
        _lon = np.linspace(0, width - 1, width)
        _lat = np.linspace(0, height - 1, height)
        agg["lon"] = _lon
        agg["lat"] = _lat

        # Create Kernel
        kernel = focal.circle_kernel(1, 1, 1)

        # Create Convolution Data Array
        convolve_agg = convolve_2d(data = agg.data, kernel = kernel)

    .. sourcecode:: python

        >>> print(convolve_agg)
        [[ 0.  0.  4.  8.  0. 16.  0.]
        [ 0.  4.  8. 10. 18. 16. 16.]
        [ 4.  8. 14. 20. 24. 30. 16.]
        [ 8. 16. 20. 24. 30. 30. 16.]
        [12. 24. 30. 30. 34. 30. 16.]
        [16. 22. 30. 30. 30. 24. 16.]
        [ 0. 16. 16. 16. 16. 16.  0.]]
    """
    # numpy case
    if isinstance(data, np.ndarray):
        out = _convolve_2d_numpy(data, kernel)

    # cupy case
    elif has_cuda() and isinstance(data, cupy.ndarray):
        out = _convolve_2d_cupy(data, kernel)

    # dask + cupy case
    elif has_cuda() and isinstance(data, da.Array) and \
            type(data._meta).__module__.split('.')[0] == 'cupy':
        out = _convolve_2d_dask_cupy(data, kernel)

    # dask + numpy case
    elif isinstance(data, da.Array):
        out = _convolve_2d_dask_numpy(data, kernel)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(data)))

    return out
Example #25
def hotspots(raster, kernel):
    """
    Identify statistically significant hot spots and cold spots in an
    input raster. To be a statistically significant hot spot, a feature
    will have a high value and be surrounded by other features with
    high values as well.
    The neighborhood of a feature is defined by the input kernel, which
    currently supports circle, annulus, and custom kernel shapes.

    The result should be a raster with the following 7 values:
        - 90 for 90% confidence high value cluster
        - 95 for 95% confidence high value cluster
        - 99 for 99% confidence high value cluster
        - -90 for 90% confidence low value cluster
        - -95 for 95% confidence low value cluster
        - -99 for 99% confidence low value cluster
        - 0 for no significance

    Parameters
    ----------
    raster : xarray.DataArray
        2D Input raster image with `raster.shape` = (height, width).
    kernel : Numpy Array
        2D array where values of 1 indicate the kernel.

    Returns
    -------
    hotspots_agg : xarray.DataArray of same type as `raster`
        2D array of hotspots with values indicating confidence level.

    Examples
    --------
    .. plot::
       :include-source:

        import datashader as ds
        import matplotlib.pyplot as plt
        from xrspatial import generate_terrain, aspect
        from xrspatial.convolution import circle_kernel
        from xrspatial.focal import hotspots

        # Create Canvas
        W = 500
        H = 300
        cvs = ds.Canvas(plot_width = W,
                        plot_height = H,
                        x_range = (-20e6, 20e6),
                        y_range = (-20e6, 20e6))

        # Generate Example Terrain
        terrain_agg = generate_terrain(canvas = cvs)

        # Edit Attributes
        terrain_agg = terrain_agg.assign_attrs(
            {
                'Description': 'Example Terrain',
                'units': 'km',
                'Max Elevation': '4000',
            }
        )

        terrain_agg = terrain_agg.rename({'x': 'lon', 'y': 'lat'})
        terrain_agg = terrain_agg.rename('Elevation')

        # Create Kernel
        kernel = circle_kernel(10, 10, 100)

        # Create Hotspots Aggregate array
        hotspots_agg = hotspots(raster = terrain_agg,
                                kernel = kernel)

        # Edit Attributes
        hotspots_agg = hotspots_agg.rename('Significance')
        hotspots_agg = hotspots_agg.assign_attrs(
            {
                'Description': 'Example Hotspots',
                'units': '%',
            }
        )

        # Plot Terrain
        terrain_agg.plot(cmap = 'terrain', aspect = 2, size = 4)
        plt.title("Terrain")
        plt.ylabel("latitude")
        plt.xlabel("longitude")

        # Plot Hotspots
        hotspots_agg.plot(aspect = 2, size = 4)
        plt.title("Hotspots")
        plt.ylabel("latitude")
        plt.xlabel("longitude")

    .. sourcecode:: python

        >>> print(terrain_agg[200:203, 200:202])
        <xarray.DataArray 'Elevation' (lat: 3, lon: 2)>
        array([[1264.02249454, 1261.94748873],
               [1285.37061171, 1282.48046696],
               [1306.02305679, 1303.40657515]])
        Coordinates:
          * lon      (lon) float64 -3.96e+06 -3.88e+06
          * lat      (lat) float64 6.733e+06 6.867e+06 7e+06
        Attributes:
            res:            1
            Description:    Example Terrain
            units:          km
            Max Elevation:  4000

        >>> print(hotspots_agg[200:203, 200:202])
        <xarray.DataArray 'Significance' (lat: 3, lon: 2)>
        array([[0, 0],
               [0, 0],
               [0, 0]], dtype=int8)
        Coordinates:
          * lon      (lon) float64 -3.96e+06 -3.88e+06
          * lat      (lat) float64 6.733e+06 6.867e+06 7e+06
        Attributes:
            res:            1
            Description:    Example Hotspots
            units:          %
            Max Elevation:  4000
    """
    # validate raster
    if not isinstance(raster, DataArray):
        raise TypeError("`raster` must be instance of DataArray")

    if raster.ndim != 2:
        raise ValueError("`raster` must be 2D")

    # numpy case
    if isinstance(raster.data, np.ndarray):
        out = _hotspots_numpy(raster, kernel)

    # cupy case
    elif has_cuda() and isinstance(raster.data, cupy.ndarray):
        out = _hotspots_cupy(raster, kernel)

    # dask + cupy case
    elif has_cuda() and isinstance(raster.data, da.Array) and \
            is_cupy_backed(raster):
        raise NotImplementedError()

    # dask + numpy case
    elif isinstance(raster.data, da.Array):
        out = _hotspots_dask_numpy(raster, kernel)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(raster.data)))

    return DataArray(out,
                     coords=raster.coords,
                     dims=raster.dims,
                     attrs=raster.attrs)
Example #26
def curvature(agg: xr.DataArray,
              name: Optional[str] = 'curvature') -> xr.DataArray:
    """
    Calculates, for all cells in the array, the curvature
    (second derivative) of each cell based on the elevation
    of its neighbors in a 3x3 grid. A positive curvature
    indicates the surface is upwardly convex. A negative
    value indicates it is upwardly concave. A value of 0
    indicates a flat surface.

    Units of the curvature output raster are one hundredth (1/100) of a z-unit.

    Parameters:
    ----------
    agg: xarray.DataArray
        2D array of elevation values
        NumPy, CuPy, NumPy-backed Dask, or Cupy-backed Dask array.
        Must contain "res" attribute.
    name: str (default = "curvature")
        Name of output DataArray.

    Returns:
    ----------
    curvature: xarray.DataArray
        2D array, of the same type as the input, of calculated curvature values.
        All other input attributes are preserved.

    Notes:
    ----------
    Algorithm References:
        - https://pro.arcgis.com/en/pro-app/latest/tool-reference/spatial-analyst/how-curvature-works.htm

    Examples:
    ----------
    Imports
    >>> import numpy as np
    >>> import xarray as xr
    >>> from xrspatial import curvature

    Create Initial DataArray
    >>> agg = xr.DataArray(np.array([[0, 1, 0, 0],
    >>>                              [1, 1, 0, 0],
    >>>                              [0, 1, 2, 2],
    >>>                              [1, 0, 2, 0],
    >>>                              [0, 2, 2, 2]]),
    >>>                    dims = ["lat", "lon"],
    >>>                    attrs = dict(res = 1))
    >>> height, width = agg.shape
    >>> _lon = np.linspace(0, width - 1, width)
    >>> _lat = np.linspace(0, height - 1, height)
    >>> agg["lon"] = _lon
    >>> agg["lat"] = _lat
    >>> print(agg)
    <xarray.DataArray (lat: 5, lon: 4)>
    array([[0, 1, 0, 0],
           [1, 1, 0, 0],
           [0, 1, 2, 2],
           [1, 0, 2, 0],
           [0, 2, 2, 2]])
    Coordinates:
      * lon      (lon) float64 0.0 1.0 2.0 3.0
      * lat      (lat) float64 0.0 1.0 2.0 3.0 4.0
    Attributes:
        res:      1

    Create Curvature DataArray
    >>> print(curvature(agg))
    <xarray.DataArray 'curvature' (lat: 5, lon: 4)>
    array([[  nan,   nan,   nan,   nan],
           [  nan,  100., -300.,   nan],
           [  nan,  100.,  300.,   nan],
           [  nan, -600.,  400.,   nan],
           [  nan,   nan,   nan,   nan]])
    Coordinates:
      * lon      (lon) float64 0.0 1.0 2.0 3.0
      * lat      (lat) float64 0.0 1.0 2.0 3.0 4.0
    Attributes:
        res:      1
    """

    cellsize_x, cellsize_y = get_dataarray_resolution(agg)
    cellsize = (cellsize_x + cellsize_y) / 2

    # numpy case
    if isinstance(agg.data, np.ndarray):
        out = _run_numpy(agg.data, cellsize)

    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        out = _run_cupy(agg.data, cellsize)

    # dask + cupy case
    elif has_cuda() and isinstance(agg.data, da.Array) and is_cupy_backed(agg):
        out = _run_dask_cupy(agg.data, cellsize)

    # dask + numpy case
    elif isinstance(agg.data, da.Array):
        out = _run_dask_numpy(agg.data, cellsize)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
Example #27
def equal_interval(agg: xr.DataArray,
                   k: int = 5,
                   name: Optional[str] = 'equal_interval') -> xr.DataArray:
    """
    Groups data for array (agg) by distributing values into equal intervals.
    The result is an xarray.DataArray.

    Parameters:
    ----------
    agg: xarray.DataArray
        2D array of values to bin.
        NumPy, CuPy, NumPy-backed Dask, or Cupy-backed Dask array
    k: int
        Number of classes to be produced.
    name: str, optional (default = "equal_interval")
        Name of output aggregate.

    Returns:
    ----------
    equal_interval_agg: xarray.DataArray
        2D array, of the same type as the input, of class allocations.

    Notes:
    ----------
    Intervals are defined to have equal width.

    Algorithm References:
    ----------
    PySal:
    - https://pysal.org/mapclassify/_modules/mapclassify/classifiers.html#EqualInterval # noqa
    SciKit:
    - https://scikit-learn.org/stable/auto_examples/classification/plot_classifier_comparison.html#sphx-glr-auto-examples-classification-plot-classifier-comparison-py # noqa

    Examples:
    ----------
    Imports
    >>> import numpy as np
    >>> import xarray as xr
    >>> from xrspatial.classify import equal_interval, natural_breaks

    Create Initial DataArray
    >>> np.random.seed(1)
    >>> agg = xr.DataArray(np.random.randint(2, 8, (4, 4)),
    >>>                    dims = ["lat", "lon"])
    >>> height, width = agg.shape
    >>> _lon = np.linspace(0, width - 1, width)
    >>> _lat = np.linspace(0, height - 1, height)
    >>> agg["lon"] = _lon
    >>> agg["lat"] = _lat
    >>> print(agg)
    <xarray.DataArray (lat: 4, lon: 4)>
    array([[7, 5, 6, 2],
           [3, 5, 7, 2],
           [2, 3, 6, 7],
           [6, 3, 4, 6]])
    Coordinates:
      * lon      (lon) float64 0.0 1.0 2.0 3.0
      * lat      (lat) float64 0.0 1.0 2.0 3.0

    Create Equal Interval DataArray
    >>> equal_interval_agg = equal_interval(agg, k = 5)
    >>> print(equal_interval_agg)
    <xarray.DataArray 'equal_interval' (lat: 4, lon: 4)>
    array([[4., 2., 3., 0.],
           [0., 2., 4., 0.],
           [0., 0., 3., 4.],
           [3., 0., 1., 3.]], dtype=float32)
    Coordinates:
      * lon      (lon) float64 0.0 1.0 2.0 3.0
      * lat      (lat) float64 0.0 1.0 2.0 3.0
    """

    # numpy case
    if isinstance(agg.data, np.ndarray):
        out = _run_numpy_equal_interval(agg.data, k)

    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        out = _run_cupy_equal_interval(agg.data, k)

    # dask + cupy case
    elif has_cuda() and \
            isinstance(agg.data, da.Array) and \
            is_cupy_backed(agg):
        out = _run_dask_cupy_equal_interval(agg.data, k)

    # dask + numpy case
    elif isinstance(agg.data, da.Array):
        out = _run_dask_numpy_equal_interval(agg.data, k)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    return DataArray(out,
                     name=name,
                     coords=agg.coords,
                     dims=agg.dims,
                     attrs=agg.attrs)
Example #28
def natural_breaks(agg: xr.DataArray,
                   num_sample: Optional[int] = None,
                   name: Optional[str] = 'natural_breaks',
                   k: int = 5) -> xr.DataArray:
    """
    Groups data for array (agg) by distributing
    values using the Jenks Natural Breaks or k-means
    clustering method. Values are grouped so that
    similar values are placed in the same group and
    space between groups is maximized.
    The result is an xarray.DataArray.

    Parameters:
    ----------
    agg: xarray.DataArray
        2D array of values to bin.
        NumPy, CuPy, NumPy-backed Dask, or Cupy-backed Dask array
    num_sample: int (optional)
        Number of sample data points used to fit the model.
        Natural Breaks (Jenks) classification has O(n²) complexity,
        where n is the total number of data points, i.e. agg.size.
        When n is large, we should fit the model on a small sub-sample
        of the data instead of using the whole dataset.
    k: int (default = 5)
        Number of classes to be produced.
    name: str, optional (default = "natural_breaks")
        Name of output aggregate.

    Returns:
    ----------
    natural_breaks_agg: xarray.DataArray
        2D array, of the same type as the input, of class allocations.

    Algorithm References:
    ----------
    Map Classify:
    - https://pysal.org/mapclassify/_modules/mapclassify/classifiers.html#NaturalBreaks # noqa
    perrygeo:
    - https://github.com/perrygeo/jenks/blob/master/jenks.pyx

    Examples:
    ----------
    Imports
    >>> import numpy as np
    >>> import xarray as xr
    >>> from xrspatial.classify import natural_breaks

    Create DataArray
    >>> np.random.seed(0)
    >>> agg = xr.DataArray(np.random.rand(4,4),
                                    dims = ["lat", "lon"])
    >>> height, width = agg.shape
    >>> _lat = np.linspace(0, height - 1, height)
    >>> _lon = np.linspace(0, width - 1, width)
    >>> agg["lat"] = _lat
    >>> agg["lon"] = _lon
    >>> print(agg)
    <xarray.DataArray (lat: 4, lon: 4)>
    array([[0.5488135 , 0.71518937, 0.60276338, 0.54488318],
           [0.4236548 , 0.64589411, 0.43758721, 0.891773  ],
           [0.96366276, 0.38344152, 0.79172504, 0.52889492],
           [0.56804456, 0.92559664, 0.07103606, 0.0871293 ]])
    Coordinates:
    * lon      (lon) float64 0.0 1.0 2.0 3.0
    * lat      (lat) float64 0.0 1.0 2.0 3.0

    Create Natural Breaks Aggregate
    >>> natural_breaks_agg = natural_breaks(agg, k = 5)
    >>> print(natural_breaks_agg)
    <xarray.DataArray 'natural_breaks' (lat: 4, lon: 4)>
    array([[2., 3., 2., 2.],
           [1., 2., 1., 4.],
           [4., 1., 3., 2.],
           [2., 4., 0., 0.]], dtype=float32)
    Coordinates:
      * lat      (lat) float64 0.0 1.0 2.0 3.0
      * lon      (lon) float64 0.0 1.0 2.0 3.0
    """

    # numpy case
    if isinstance(agg.data, np.ndarray):
        out = _run_numpy_natural_break(agg.data, num_sample, k)

    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        out = _run_cupy_natural_break(agg.data, num_sample, k)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    return DataArray(out,
                     name=name,
                     coords=agg.coords,
                     dims=agg.dims,
                     attrs=agg.attrs)
Example #29
def natural_breaks(agg: xr.DataArray,
                   num_sample: Optional[int] = None,
                   name: Optional[str] = 'natural_breaks',
                   k: int = 5) -> xr.DataArray:
    """
    Reclassifies data for array `agg` into new values based on Natural
    Breaks or K-Means clustering method. Values are grouped so that
    similar values are placed in the same group and space between
    groups is maximized.

    Parameters
    ----------
    agg : xarray.DataArray
        2D NumPy or CuPy array of values to be reclassified.
    num_sample : int, default=None
        Number of sample data points used to fit the model.
        Natural Breaks (Jenks) classification has O(n²) complexity,
        where n is the total number of data points, i.e. `agg.size`.
        When n is large, fit the model on a small sub-sample of the
        data instead of the whole dataset (a usage sketch follows this
        function).
    k : int, default=5
        Number of classes to be produced.
    name : str, default='natural_breaks'
        Name of output aggregate.

    Returns
    -------
    natural_breaks_agg : xarray.DataArray of the same type as `agg`
        2D aggregate array of natural break allocations.
        All other input attributes are preserved.

    References
    ----------
        - PySAL: https://pysal.org/mapclassify/_modules/mapclassify/classifiers.html#NaturalBreaks # noqa
        - jenks: https://github.com/perrygeo/jenks/blob/master/jenks.pyx

    Examples
    --------
    .. plot::
       :include-source:

        import datashader as ds
        import matplotlib.pyplot as plt
        from xrspatial import generate_terrain
        from xrspatial.classify import natural_breaks

        # Create Canvas
        W = 500
        H = 300
        cvs = ds.Canvas(plot_width = W,
                        plot_height = H,
                        x_range = (-20e6, 20e6),
                        y_range = (-20e6, 20e6))

        # Generate Example Terrain
        terrain_agg = generate_terrain(canvas = cvs)

        # Edit Attributes
        terrain_agg = terrain_agg.assign_attrs(
            {
                'Description': 'Example Terrain',
                'units': 'km',
                'Max Elevation': '4000',
            }
        )
        
        terrain_agg = terrain_agg.rename({'x': 'lon', 'y': 'lat'})
        terrain_agg = terrain_agg.rename('Elevation')

        # Create Natural Breaks Aggregate Array
        natural_breaks_agg = natural_breaks(agg = terrain_agg, name = 'Elevation')

        # Edit Attributes
        natural_breaks_agg = natural_breaks_agg.assign_attrs({'Description': 'Example Natural Breaks'})

        # Plot Terrain
        terrain_agg.plot(cmap = 'terrain', aspect = 2, size = 4)
        plt.title("Terrain")
        plt.ylabel("latitude")
        plt.xlabel("longitude")

        # Plot Natural Breaks
        natural_breaks_agg.plot(cmap = 'terrain', aspect = 2, size = 4)
        plt.title("Natural Breaks")
        plt.ylabel("latitude")
        plt.xlabel("longitude")

    .. sourcecode:: python

        >>> print(terrain_agg[200:203, 200:202])
        <xarray.DataArray 'Elevation' (lat: 3, lon: 2)>
        array([[1264.02249454, 1261.94748873],
               [1285.37061171, 1282.48046696],
               [1306.02305679, 1303.40657515]])
        Coordinates:
          * lon      (lon) float64 -3.96e+06 -3.88e+06
          * lat      (lat) float64 6.733e+06 6.867e+06 7e+06
        Attributes:
            res:            1
            Description:    Example Terrain
            units:          km
            Max Elevation:  4000

        >>> print(natural_breaks_agg[200:203, 200:202])
        <xarray.DataArray 'Elevation' (lat: 3, lon: 2)>
        array([[1., 1.],
               [1., 1.],
               [1., 1.]], dtype=float32)
        Coordinates:
          * lon      (lon) float64 -3.96e+06 -3.88e+06
          * lat      (lat) float64 6.733e+06 6.867e+06 7e+06
        Attributes:
            res:            1
            Description:    Example Natural Breaks
            units:          km
            Max Elevation:  4000
    """
    # numpy case
    if isinstance(agg.data, np.ndarray):
        out = _run_numpy_natural_break(agg.data, num_sample, k)

    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        out = _run_cupy_natural_break(agg.data, num_sample, k)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    return DataArray(out,
                     name=name,
                     coords=agg.coords,
                     dims=agg.dims,
                     attrs=agg.attrs)
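For quick experimentation outside the plotting pipeline above, a minimal end-to-end sketch on a NumPy-backed DataArray; the raster size and sub-sample size are arbitrary choices, not values from the library:

import numpy as np
import xarray as xr

from xrspatial.classify import natural_breaks

# Classify a small random raster into 5 classes, fitting the Jenks
# breaks on a 1,000-cell sub-sample instead of all 10,000 cells.
np.random.seed(0)
agg = xr.DataArray(np.random.rand(100, 100), dims=['lat', 'lon'])
natural_breaks_agg = natural_breaks(agg, num_sample=1000, k=5)

# The output keeps the input's dims, coords and attrs; cell values are
# the class indices 0 .. k-1.
assert natural_breaks_agg.shape == agg.shape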
Example #30
0
def equal_interval(agg: xr.DataArray,
                   k: int = 5,
                   name: Optional[str] = 'equal_interval') -> xr.DataArray:
    """
    Reclassifies data for array `agg` into new values based on intervals
    of equal width.

    Parameters
    ----------
    agg : xarray.DataArray
        2D NumPy, CuPy, NumPy-backed Dask, or CuPy-backed Dask array
        of values to be reclassified.
    k : int, default=5
        Number of classes to be produced.
    name : str, default='equal_interval'
        Name of output aggregate.

    Returns
    -------
    equal_interval_agg : xarray.DataArray of the same type as `agg`
        2D aggregate array of equal interval allocations.
        All other input attributes are preserved.

    References
    ----------
        - PySAL: https://pysal.org/mapclassify/_modules/mapclassify/classifiers.html#EqualInterval # noqa
        - scikit-learn: https://scikit-learn.org/stable/auto_examples/classification/plot_classifier_comparison.html#sphx-glr-auto-examples-classification-plot-classifier-comparison-py # noqa

    Examples
    --------
    .. plot::
       :include-source:

        import datashader as ds
        import matplotlib.pyplot as plt
        from xrspatial import generate_terrain
        from xrspatial.classify import equal_interval

        # Create Canvas
        W = 500
        H = 300
        cvs = ds.Canvas(plot_width = W,
                        plot_height = H,
                        x_range = (-20e6, 20e6),
                        y_range = (-20e6, 20e6))

        # Generate Example Terrain
        terrain_agg = generate_terrain(canvas = cvs)

        # Edit Attributes
        terrain_agg = terrain_agg.assign_attrs(
            {
                'Description': 'Example Terrain',
                'units': 'km',
                'Max Elevation': '4000',
            }
        )
        
        terrain_agg = terrain_agg.rename({'x': 'lon', 'y': 'lat'})
        terrain_agg = terrain_agg.rename('Elevation')

        # Create Equal Interval Aggregate Array
        equal_interval_agg = equal_interval(agg = terrain_agg, name = 'Elevation')

        # Edit Attributes
        equal_interval_agg = equal_interval_agg.assign_attrs({'Description': 'Example Equal Interval'})

        # Plot Terrain
        terrain_agg.plot(cmap = 'terrain', aspect = 2, size = 4)
        plt.title("Terrain")
        plt.ylabel("latitude")
        plt.xlabel("longitude")

        # Plot Equal Interval
        equal_interval_agg.plot(cmap = 'terrain', aspect = 2, size = 4)
        plt.title("Equal Interval")
        plt.ylabel("latitude")
        plt.xlabel("longitude")

    .. sourcecode:: python

        >>> print(terrain_agg[200:203, 200:202])
        <xarray.DataArray 'Elevation' (lat: 3, lon: 2)>
        array([[1264.02249454, 1261.94748873],
               [1285.37061171, 1282.48046696],
               [1306.02305679, 1303.40657515]])
        Coordinates:
          * lon      (lon) float64 -3.96e+06 -3.88e+06
          * lat      (lat) float64 6.733e+06 6.867e+06 7e+06
        Attributes:
            res:            1
            Description:    Example Terrain
            units:          km
            Max Elevation:  4000

    .. sourcecode:: python

        >>> print(equal_interval_agg[200:203, 200:202])
        <xarray.DataArray 'Elevation' (lat: 3, lon: 2)>
        array([[1., 1.],
               [1., 1.],
               [1., 1.]], dtype=float32)
        Coordinates:
          * lon      (lon) float64 -3.96e+06 -3.88e+06
          * lat      (lat) float64 6.733e+06 6.867e+06 7e+06
        Attributes:
            res:            1
            Description:    Example Equal Interval
            units:          km
            Max Elevation:  4000
    """
    # numpy case
    if isinstance(agg.data, np.ndarray):
        out = _run_numpy_equal_interval(agg.data, k)

    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        out = _run_cupy_equal_interval(agg.data, k)

    # dask + cupy case
    elif has_cuda() and \
            isinstance(agg.data, da.Array) and \
            is_cupy_backed(agg):
        out = _run_dask_cupy_equal_interval(agg.data, k)

    # dask + numpy case
    elif isinstance(agg.data, da.Array):
        out = _run_dask_numpy_equal_interval(agg.data, k)

    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    return DataArray(out,
                     name=name,
                     coords=agg.coords,
                     dims=agg.dims,
                     attrs=agg.attrs)
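Equal interval classification itself is straightforward to express with plain NumPy; a minimal sketch of the underlying binning, illustrative rather than the library's _run_numpy_equal_interval:

import numpy as np


def equal_interval_sketch(values, k=5):
    # Split the range [min, max] into k bins of identical width and return,
    # for each value, the index (0 .. k-1) of the bin it falls into.
    # NaN handling is omitted for brevity.
    lo = np.nanmin(values)
    hi = np.nanmax(values)
    # Upper bounds of the first k-1 bins; the top bin is closed at `hi`.
    bounds = lo + (hi - lo) / k * np.arange(1, k)
    return np.digitize(values, bounds).astype(np.float32)

With k=5 on values spanning 0-4000, for example, the bin edges fall at 800, 1600, 2400 and 3200 regardless of how the values are distributed, which is the key difference from the natural-breaks classifier above.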