def process(geom_data, raster_data, process_kwargs):
        if process_kwargs.get("empty"):
            return {
                "features": gpd.GeoDataFrame([]),
                "projection": process_kwargs["projection"],
            }
        elif process_kwargs["mode"] == "extent":
            return geom_data

        features = geom_data["features"]
        if len(features) == 0:
            return geom_data

        result = features.copy()

        # transform the features into the aggregation projection
        req_srs = process_kwargs["req_srs"]
        agg_srs = process_kwargs["agg_srs"]

        agg_geometries = utils.geoseries_transform(features["geometry"],
                                                   req_srs, agg_srs)

        statistic = process_kwargs["statistic"]
        percentile = utils.parse_percentile_statistic(statistic)
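        # parse_percentile_statistic presumably returns the numeric value for
        # statistics like "p95" (here: 95) and a falsy value otherwise; an
        # assumption based on how the result is used below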
        if percentile:
            statistic = "percentile"
            agg_func = partial(AggregateRaster.STATISTICS[statistic]["func"],
                               qval=percentile)
        else:
            agg_func = AggregateRaster.STATISTICS[statistic]["func"]

        extensive = AggregateRaster.STATISTICS[statistic]["extensive"]
        result_column = process_kwargs["result_column"]

        # the threshold column is only present for AggregateRasterAboveThreshold
        threshold_name = process_kwargs.get("threshold_name")
        if threshold_name:
            # get the threshold, appending NaN for unlabeled pixels
            threshold_values = np.empty((len(features) + 1, ), dtype="f4")
            threshold_values[:-1] = features[threshold_name].values
            threshold_values[-1] = np.nan
        else:
            threshold_values = None

        # investigate the raster data
        if raster_data is None:
            values = no_data_value = None
        else:
            values = raster_data["values"]
            no_data_value = raster_data["no_data_value"]
        if values is None or np.all(values == no_data_value):  # skip the rest
            result[result_column] = 0 if extensive else np.nan
            return {"features": result, "projection": req_srs}
        depth, height, width = values.shape

        pixel_size = process_kwargs["pixel_size"]
        actual_pixel_size = process_kwargs["actual_pixel_size"]
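        # actual_pixel_size presumably differs from pixel_size when the raster
        # was read at a coarser resolution than requested; extensive statistics
        # are corrected for this with an area ratio further down (assumption
        # based on the scaling step below)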

        # process the features in disjoint buckets, grouped by bounding box
        agg = np.full((depth, len(features)), np.nan, dtype="f4")
        for select in bucketize(features.bounds.values):
            rasterize_result = utils.rasterize_geoseries(
                agg_geometries.iloc[select],
                process_kwargs["agg_bbox"],
                agg_srs,
                height,
                width,
                values=np.asarray(select, dtype=np.int32),  # GDAL needs int32
            )
            labels = rasterize_result["values"][0]

            # if there is a threshold, generate a raster with thresholds
            if threshold_name:
                # mode="clip" ensures that unlabeled cells use the appended NaN
                thresholds = np.take(threshold_values, labels, mode="clip")
            else:
                thresholds = None

            for frame_no, frame in enumerate(values):
                # limit statistics to active pixels
                active = frame != no_data_value
                # if there is a threshold, mask the frame
                if threshold_name:
                    valid = ~np.isnan(thresholds)  # to suppress warnings
                    active[~valid] = False  # no threshold -> no aggregation
                    active[valid] &= frame[valid] >= thresholds[valid]

                # if there are no active values at all, skip this frame
                if not active.any():
                    continue

                # restrict to features that actually have active data
                # (min, max, median, and percentile fail on empty groups otherwise)
                active_labels = labels[active]
                select_and_active = list(
                    set(np.unique(active_labels)) & set(select))

                if not select_and_active:
                    continue

                agg[frame_no][select_and_active] = agg_func(
                    1 if statistic == "count" else frame[active],
                    labels=active_labels,
                    index=select_and_active,
                )

        if extensive:  # sum and count
            agg[~np.isfinite(agg)] = 0
            # extensive aggregations have to be scaled
            if actual_pixel_size != pixel_size:
                agg *= (actual_pixel_size / pixel_size)**2
        else:
            agg[~np.isfinite(agg)] = np.nan  # replace inf with nan

        if depth == 1:
            result[result_column] = agg[0]
        else:
            # to store an array in each dataframe cell, wrap it in a list: [np.array]
            result[result_column] = [[x] for x in agg.T]

        return {"features": result, "projection": req_srs}
    def process(geom_data, raster_data, process_kwargs):
        if process_kwargs.get("empty"):
            return {
                "features": gpd.GeoDataFrame([]),
                "projection": process_kwargs["projection"],
            }
        elif process_kwargs["mode"] == "extent":
            return geom_data

        features = geom_data["features"]
        if len(features) == 0:
            return geom_data

        result = features.copy()

        # transform the features into the aggregation projection
        req_srs = process_kwargs["req_srs"]
        agg_srs = process_kwargs["agg_srs"]

        agg_geometries = utils.geoseries_transform(
            features["geometry"],
            req_srs,
            agg_srs,
        )

        statistic = process_kwargs["statistic"]
        percentile = utils.parse_percentile_statistic(statistic)
        if percentile:
            statistic = "percentile"
            agg_func = partial(AggregateRaster.STATISTICS[statistic]["func"],
                               qval=percentile)
        else:
            agg_func = AggregateRaster.STATISTICS[statistic]["func"]

        extensive = AggregateRaster.STATISTICS[statistic]["extensive"]
        result_column = process_kwargs["result_column"]

        # the threshold column is only present for AggregateRasterAboveThreshold
        threshold_name = process_kwargs.get("threshold_name")

        # investigate the raster data
        if raster_data is None:
            values = no_data_value = None
        else:
            values = raster_data["values"]
            no_data_value = raster_data["no_data_value"]
        if values is None or np.all(values == no_data_value):  # skip the rest
            result[result_column] = 0 if extensive else np.nan
            return {"features": result, "projection": req_srs}
        depth, height, width = values.shape

        pixel_size = process_kwargs["pixel_size"]
        actual_pixel_size = process_kwargs["actual_pixel_size"]

        # process the features in disjoint buckets, grouped by bounding box
        agg = np.full((depth, len(features)), np.nan, dtype="f4")
        for select in bucketize(features.bounds.values):
            agg_geometries_bucket = agg_geometries.iloc[select]
            index = features.index[select]

            rasterize_result = utils.rasterize_geoseries(
                agg_geometries_bucket,
                process_kwargs["agg_bbox"],
                agg_srs,
                height,
                width,
                values=index,
            )
            labels = rasterize_result["values"][0]

            # if there is a threshold, generate a raster with thresholds
            if threshold_name:
                # note: this lookup assumes every pixel carries a label that is
                # present in features.index
                thresholds = features.loc[
                    labels.ravel(), threshold_name
                ].values.reshape(labels.shape)
            else:
                thresholds = None

            for frame_no, frame in enumerate(values):
                # limit statistics to active pixels
                active = frame != no_data_value
                # if there is a threshold, mask the frame
                if threshold_name:
                    valid = ~np.isnan(thresholds)  # to suppress warnings
                    active[~valid] = False  # no threshold -> no aggregation
                    active[valid] &= frame[valid] >= thresholds[valid]

                # if there are no active values at all, skip this frame
                if not active.any():
                    continue

                with warnings.catch_warnings():
                    # we may get divide by 0 if any geometry does not contain
                    # any 'active' values
                    warnings.simplefilter("ignore")
                    agg[frame_no][select] = agg_func(
                        1 if statistic == "count" else frame[active],
                        labels=labels[active],
                        index=index,
                    )

        if extensive:  # sum and count
            agg[~np.isfinite(agg)] = 0
            # extensive aggregations have to be scaled
            if actual_pixel_size != pixel_size:
                agg *= (actual_pixel_size / pixel_size)**2
        else:
            agg[~np.isfinite(agg)] = np.nan  # replace inf with nan

        if depth == 1:
            result[result_column] = agg[0]
        else:
            # to store an array in each dataframe cell, wrap it in a list: [np.array]
            result[result_column] = [[x] for x in agg.T]

        return {"features": result, "projection": req_srs}