Example #1
# UDF API classes provided by the openeo Python client
from openeo.udf import UdfData, XarrayDataCube


def hyper_min_median_max(udf_data: UdfData):
    """Compute the min, median and max of the time dimension of a hyper cube

    Hypercubes with time dimensions are required. The min, median and max reduction of th time axis will be applied
    to all hypercube dimensions.

    Args:
        udf_data (UdfData): The UDF data object that contains raster and vector tiles as well as hypercubes
        and structured data.

    Returns:
        This function does not return anything; the UdfData object "udf_data" must be used to store the
        resulting data.

    """
    # Iterate over each hypercube and reduce it along the time axis
    cube_list = []
    for cube in udf_data.get_datacube_list():
        min = cube.array.min(dim="t")
        median = cube.array.median(dim="t")
        max = cube.array.max(dim="t")

        min.name = cube.id + "_min"
        median.name = cube.id + "_median"
        max.name = cube.id + "_max"

        cube_list.append(XarrayDataCube(array=min))
        cube_list.append(XarrayDataCube(array=median))
        cube_list.append(XarrayDataCube(array=max))

    udf_data.set_datacube_list(cube_list)
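
A minimal local test sketch for this UDF, assuming the openeo Python client's openeo.udf classes; the array shape and the name "example_cube" below are illustrative only:

import numpy
import xarray

from openeo.udf import UdfData, XarrayDataCube

# Illustrative only: a tiny random hypercube with a "t" (time) dimension
array = xarray.DataArray(numpy.random.rand(5, 4, 4), dims=["t", "y", "x"], name="example_cube")
data = UdfData(datacube_list=[XarrayDataCube(array=array)])
hyper_min_median_max(data)
for result in data.get_datacube_list():
    print(result.id, result.array.shape)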
Example #2
# UDF API classes provided by the openeo Python client
from openeo.udf import StructuredData, UdfData


def rct_stats(udf_data: UdfData):
    """Compute univariate statistics for each hypercube

    Args:
        udf_data (UdfData): The UDF data object that contains raster and vector tiles

    Returns:
        This function does not return anything; the UdfData object "udf_data" must be used to store the
        resulting data.

    """
    # The dictionary that stores the statistical data
    stats = {}
    # Iterate over each raster collection cube and compute statistical values
    for cube in udf_data.get_datacube_list():
        # make sure to cast the values to floats, otherwise they are not serializable
        stats[cube.id] = dict(sum=float(cube.array.sum()),
                              mean=float(cube.array.mean()),
                              min=float(cube.array.min()),
                              max=float(cube.array.max()))
    # Create the structured data object
    sd = StructuredData(
        data=stats,
        type="dict",
        description="Statistical data sum, min, max and mean for each raster collection cube as dict",
    )
    # Remove all collections and set the StructuredData list
    udf_data.set_datacube_list(None)
    udf_data.set_structured_data_list([
        sd,
    ])
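
A minimal local test sketch under the same assumptions (openeo.udf classes, a synthetic input named "example_cube"); the printed dictionary holds the per-cube statistics:

import numpy
import xarray

from openeo.udf import UdfData, XarrayDataCube

# Illustrative only: one small random cube as input
array = xarray.DataArray(numpy.random.rand(5, 4, 4), dims=["t", "y", "x"], name="example_cube")
data = UdfData(datacube_list=[XarrayDataCube(array=array)])
rct_stats(data)
print(data.get_structured_data_list()[0].data)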
Example #3
import numpy

# UDF API classes provided by the openeo Python client
from openeo.udf import UdfData, XarrayDataCube


def hyper_map_fabs(udf_data: UdfData):
    """Compute the absolute values of each hypercube in the provided data

    Args:
        udf_data (UdfData): The UDF data object that contains raster and vector tiles as well as hypercubes
        and structured data.

    Returns:
        This function does not return anything; the UdfData object "udf_data" must be used to store the
        resulting data.

    """
    # Iterate over each hypercube
    cube_list = []
    for cube in udf_data.get_datacube_list():
        result = numpy.fabs(cube.array)
        result.name = cube.id + "_fabs"
        cube_list.append(XarrayDataCube(array=result))
    udf_data.set_datacube_list(cube_list)
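
A minimal local test sketch, again assuming the openeo.udf classes; the input data and names are illustrative:

import numpy
import xarray

from openeo.udf import UdfData, XarrayDataCube

# Illustrative only: a small cube with negative values so the effect of fabs is visible
array = xarray.DataArray(numpy.random.randn(4, 4), dims=["y", "x"], name="example_cube")
data = UdfData(datacube_list=[XarrayDataCube(array=array)])
hyper_map_fabs(data)
result = data.get_datacube_list()[0]
print(result.id, float(result.array.min()))  # "example_cube_fabs" and a non-negative minimum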
Example #4
# UDF API classes provided by the openeo Python client
from openeo.udf import UdfData, XarrayDataCube


def hyper_ndvi(udf_data: UdfData):
    """Compute the NDVI based on RED and NIR hypercubes

    Hypercubes whose ids contain "red" and "nir" (case-insensitive) are required. The NDVI is computed
    element-wise across all dimensions of the two hypercubes.

    Args:
        udf_data (UdfData): The UDF data object that contains raster and vector tiles as well as hypercubes
        and structured data.

    Returns:
        This function does not return anything; the UdfData object "udf_data" must be used to store the
        resulting data.

    """
    red = None
    nir = None

    # Search the hypercube list for the red and nir cubes by their ids
    for cube in udf_data.get_datacube_list():
        if "red" in cube.id.lower():
            red = cube
        if "nir" in cube.id.lower():
            nir = cube
    if red is None:
        raise Exception("Red hypercube is missing in input")
    if nir is None:
        raise Exception("Nir hypercube is missing in input")

    ndvi = (nir.array - red.array) / (nir.array + red.array)
    ndvi.name = "NDVI"

    hc = XarrayDataCube(array=ndvi)
    udf_data.set_datacube_list([
        hc,
    ])
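
A minimal local test sketch with two synthetic cubes whose ids contain "red" and "nir" (assumed names; same openeo.udf classes as above):

import numpy
import xarray

from openeo.udf import UdfData, XarrayDataCube

# Illustrative only: two small single-band cubes named "red" and "nir"
red = xarray.DataArray(numpy.random.rand(4, 4), dims=["y", "x"], name="red")
nir = xarray.DataArray(numpy.random.rand(4, 4), dims=["y", "x"], name="nir")
data = UdfData(datacube_list=[XarrayDataCube(array=red), XarrayDataCube(array=nir)])
hyper_ndvi(data)
print(data.get_datacube_list()[0].array.name)  # "NDVI"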