Example #1
def __init__(self, platforms_cfg, refresh=False):
    # Only (re)build the platform/product indexes on first call, or when
    # a refresh is explicitly requested.
    if not self.initialised or refresh:
        self.initialised = True
        self.platforms = []
        self.platform_index = {}
        self.product_index = {}
        dc = get_cube()
        for platform_cfg in platforms_cfg:
            platform = PlatformLayerDef(platform_cfg, self.product_index, dc=dc)
            self.platforms.append(platform)
            self.platform_index[platform.name] = platform
        release_cube(dc)
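
All of the examples below borrow a pooled Datacube handle with get_cube() and hand it back with release_cube(). As a minimal sketch (not part of datacube_wms; the app keyword is borrowed from Example #2), a context manager would guarantee the handle is returned even when an exception escapes:

from contextlib import contextmanager

from datacube_wms.cube_pool import get_cube, release_cube

@contextmanager
def cube(app="wms"):
    # Borrow a pooled Datacube handle; always return it, even on error.
    dc = get_cube(app=app)
    try:
        yield dc
    finally:
        release_cube(dc, app=app)
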
Example #2
from datacube_wms.product_ranges import update_all_ranges
from datacube_wms.cube_pool import get_cube, release_cube

if __name__ == "__main__":
    app = "wms_update"
    dc = get_cube(app=app)
    passed, updated, inserted = update_all_ranges(dc)
    release_cube(dc, app=app)
    print("%d existing products unchanged" % passed)
    print("%d existing products updated" % updated)
    print("%d new products inserted" % inserted)
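
Using the cube() context-manager sketch from above, the same script would return the pooled handle even if update_all_ranges() raised part-way through:

if __name__ == "__main__":
    with cube(app="wms_update") as dc:
        passed, updated, inserted = update_all_ranges(dc)
    print("%d existing products unchanged" % passed)
    print("%d existing products updated" % updated)
    print("%d new products inserted" % inserted)
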
Example #3
def get_coverage_data(req):
    #pylint: disable=too-many-locals, protected-access
    dc = get_cube()
    datasets = []
    for t in req.times:
        # IF t was passed to the datasets method instead of the stacker
        # constructor, we could use the one stacker.
        stacker = DataStacker(req.product, req.geobox, t, bands=req.bands)
        t_datasets = stacker.datasets(dc.index)
        if not t_datasets:
            # No matching data for this date
            continue
        datasets.extend(t_datasets)
    if not datasets:
        # TODO: Return an empty coverage file with full metadata?
        extents = dc.load(dask_chunks={},
                          product=req.product.product.name,
                          geopolygon=req.geobox.extent,
                          time=stacker._time)
        svc = get_service_cfg()
        x_range = (req.minx, req.maxx)
        y_range = (req.miny, req.maxy)
        xname = svc.published_CRSs[req.request_crsid]["horizontal_coord"]
        yname = svc.published_CRSs[req.request_crsid]["vertical_coord"]
        if xname in extents:
            xvals = extents[xname]
        else:
            xvals = numpy.linspace(x_range[0], x_range[1], num=req.width)
        if yname in extents:
            yvals = extents[yname]
        else:
            yvals = numpy.linspace(y_range[0], y_range[1], num=req.height)
        if svc.published_CRSs[req.request_crsid]["vertical_coord_first"]:
            nparrays = {
                band: ((yname, xname),
                       numpy.full((len(yvals), len(xvals)),
                                  req.product.nodata_dict[band]))
                for band in req.bands
            }
        else:
            nparrays = {
                band: ((xname, yname),
                       numpy.full((len(xvals), len(yvals)),
                                  req.product.nodata_dict[band]))
                for band in req.bands
            }
        data = xarray.Dataset(nparrays, coords={
            xname: xvals,
            yname: yvals,
        }).astype("int16")
        release_cube(dc)
        return data

    if req.product.max_datasets_wcs > 0 and len(datasets) > req.product.max_datasets_wcs:
        release_cube(dc)
        raise WCS1Exception(
            "This request processes too much data to be served in a reasonable amount of time. "
            "Please reduce the bounds of your request and try again. "
            "(max: %d, this request requires: %d)" %
            (req.product.max_datasets_wcs, len(datasets)))

    if req.format["multi-time"] and len(req.times) > 1:
        # Group by solar day
        group_by = datacube.api.query.query_group_by(time=req.times,
                                                     group_by='solar_day')
        datasets = dc.group_datasets(datasets, group_by)

    stacker = DataStacker(req.product,
                          req.geobox,
                          req.times[0],
                          bands=req.bands)
    output = stacker.data(datasets, skip_corrections=True)
    release_cube(dc)
    return output
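
The multi-time branch above relies on datacube's query grouping to collapse raw datasets into one slice per solar day. Isolated from the handler, that step looks roughly like this (assuming a live dc handle and a non-empty datasets list, as in the example):

import datacube

# Merge acquisitions from the same solar day into a single time slice.
group_by = datacube.api.query.query_group_by(group_by='solar_day')
grouped = dc.group_datasets(datasets, group_by)
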
Example #4
def feature_info(args):
    # Version parameter
    version = get_arg(args,
                      "version",
                      "WMS version",
                      permitted_values=["1.1.1", "1.3.0"])

    # Layer/product
    product = get_product_from_arg(args, "query_layers")

    fmt = get_arg(args,
                  "info_format",
                  "info format",
                  lower=True,
                  errcode=WMSException.INVALID_FORMAT,
                  permitted_values=["application/json"])

    # CRS parameter
    if version == "1.1.1":
        crs_arg = "srs"
    else:
        crs_arg = "crs"
    crsid = get_arg(args,
                    crs_arg,
                    "Coordinate Reference System",
                    errcode=WMSException.INVALID_FORMAT,
                    permitted_values=service_cfg["published_CRSs"].keys())
    crs = geometry.CRS(crsid)

    # BBox, height and width parameters
    geobox = _get_geobox(args, crs)

    # Time parameter
    time = get_time(args, product)

    # Point coords
    if version == "1.1.1":
        coords = ["x", "y"]
    else:
        coords = ["i", "j"]
    i = args.get(coords[0])
    j = args.get(coords[1])
    if i is None:
        raise WMSException("Horizontal coordinate not supplied",
                           WMSException.INVALID_POINT,
                           "%s parameter" % coords[0])
    if j is None:
        raise WMSException("Vertical coordinate not supplied",
                           WMSException.INVALID_POINT,
                           "%s parameter" % coords[1])
    i = int(i)
    j = int(j)

    # Prepare to extract feature info
    tiler = RGBTileGenerator(product, geobox, time)
    feature_json = {}

    # --- Begin code section requiring datacube.
    dc = get_cube()
    geo_point = img_coords_to_geopoint(geobox, i, j)
    datasets = tiler.datasets(dc.index, all_time=True, point=geo_point)
    pq_datasets = tiler.datasets(dc.index,
                                 mask=True,
                                 all_time=True,
                                 point=geo_point)

    if service_cfg["published_CRSs"][crsid]["geographic"]:
        h_coord = "longitude"
        v_coord = "latitude"
    else:
        h_coord = service_cfg["published_CRSs"][crsid]["horizontal_coord"]
        v_coord = service_cfg["published_CRSs"][crsid]["vertical_coord"]
    isel_kwargs = {h_coord: [i], v_coord: [j]}
    if not datasets:
        pass
    else:
        available_dates = set()
        for d in datasets:
            idx_date = (d.center_time +
                        timedelta(hours=product.time_zone)).date()
            available_dates.add(idx_date)
            if idx_date == time and "lon" not in feature_json:
                data = tiler.data([d])

                # Use i,j image coordinates to extract data pixel from dataset, and
                # convert to lat/long geographic coordinates
                if service_cfg["published_CRSs"][crsid]["geographic"]:
                    # Geographic coordinate systems (e.g. EPSG:4326/WGS-84) are already in lat/long
                    feature_json["lat"] = data.latitude[j].item()
                    feature_json["lon"] = data.longitude[i].item()
                    pixel_ds = data.isel(**isel_kwargs)
                else:
                    # Non-geographic coordinate systems need to be projected onto a geographic
                    # coordinate system.  Why not use EPSG:4326?
                    # Extract coordinates in CRS
                    data_x = getattr(data, h_coord)
                    data_y = getattr(data, v_coord)

                    x = data_x[i].item()
                    y = data_y[j].item()
                    pt = geometry.point(x, y, crs)

                    # Project to EPSG:4326
                    crs_geo = geometry.CRS("EPSG:4326")
                    ptg = pt.to_crs(crs_geo)

                    # Capture lat/long coordinates
                    feature_json["lon"], feature_json["lat"] = ptg.coords[0]

                # Extract data pixel
                pixel_ds = data.isel(**isel_kwargs)

                # Get accurate timestamp from dataset
                feature_json["time"] = d.center_time.strftime(
                    "%Y-%m-%d %H:%M:%S UTC")

                # Collect raw band values for pixel
                feature_json["bands"] = {}
                for band in tiler.needed_bands():
                    band_val = pixel_ds[band].item()
                    if band_val == -999:  # hard-coded nodata sentinel
                        feature_json["bands"][band] = "n/a"
                    else:
                        feature_json["bands"][band] = band_val

        my_flags = 0
        for pqd in pq_datasets:
            idx_date = (pqd.center_time +
                        timedelta(hours=product.time_zone)).date()
            if idx_date == time:
                pq_data = tiler.data([pqd], mask=True)
                pq_pixel_ds = pq_data.isel(**isel_kwargs)
                # PQ flags
                m = product.pq_product.measurements[product.pq_band]
                flags = pq_pixel_ds[product.pq_band].item()
                my_flags = my_flags | flags
                feature_json["flags"] = {}
                for mk, mv in m["flags_definition"].items():
                    bits = mv["bits"]
                    values = mv["values"]
                    if not isinstance(bits, int):
                        continue
                    flag = 1 << bits
                    if my_flags & flag:
                        val = values['1']
                    else:
                        val = values['0']
                    feature_json["flags"][mk] = val

        lads = list(available_dates)
        lads.sort()
        feature_json["data_available_for_dates"] = [
            d.strftime("%Y-%m-%d") for d in lads
        ]
    release_cube(dc)
    # --- End code section requiring datacube.

    result = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": feature_json
        }]
    }
    return json.dumps(result), 200, resp_headers(
        {"Content-Type": "application/json"})
Example #5
def get_map(args):
    # Version parameter
    # GetMap 1.1.1 must be supported for Terria
    version = get_arg(args,
                      "version",
                      "WMS version",
                      permitted_values=["1.1.1", "1.3.0"])

    # CRS parameter
    if version == "1.1.1":
        crs_arg = "srs"
    else:
        crs_arg = "crs"
    crsid = get_arg(args,
                    crs_arg,
                    "Coordinate Reference System",
                    errcode=WMSException.INVALID_CRS,
                    permitted_values=service_cfg["published_CRSs"].keys())
    crs = geometry.CRS(crsid)

    # Layers and Styles parameters
    product = get_product_from_arg(args)
    styles = args.get("styles", "").split(",")
    if len(styles) != 1:
        raise WMSException("Multi-layer GetMap requests not supported")
    style_r = styles[0]
    if not style_r:
        style_r = product.platform.default_style
    style = product.platform.style_index.get(style_r)
    if not style:
        raise WMSException("Style %s is not defined" % style_r,
                           WMSException.STYLE_NOT_DEFINED,
                           locator="Style parameter")

    # Format parameter
    fmt = get_arg(args,
                  "format",
                  "image format",
                  errcode=WMSException.INVALID_FORMAT,
                  lower=True,
                  permitted_values=["image/png"])

    # BBox, height and width parameters
    geobox = _get_geobox(args, crs)

    # Zoom Factor
    zf = zoom_factor(args, crs)

    # Time parameter
    time = get_time(args, product)

    # Tiling.
    tiler = RGBTileGenerator(product, geobox, time, style=style)
    dc = get_cube()
    datasets = tiler.datasets(dc.index)
    if style.pq_mask_flags:
        pq_datasets = tiler.datasets(dc.index, mask=True)
    else:
        pq_datasets = None
    if not datasets:
        body = _write_empty(geobox)
    elif zf < product.min_zoom:
        # Zoomed out too far to properly render data.
        # Construct a polygon which is the union of the extents of the matching datasets.
        extent = None
        for ds in datasets:
            if extent:
                extent = extent.union(ds.extent)
            else:
                extent = ds.extent
        extent = extent.to_crs(geobox.crs)

        body = _write_polygon(geobox, extent, product.zoom_fill)
    else:
        masks = []
        data = tiler.data(datasets)
        extent_mask = None
        for band in style.needed_bands:
            # Accumulate the nodata mask across all needed bands, rather
            # than keeping only the last band's mask.
            band_mask = (data[band] != data[band].attrs['nodata'])
            extent_mask = band_mask if extent_mask is None else (extent_mask & band_mask)
        if pq_datasets:
            # ??????
            # sources = datacube.Datacube.group_datasets(datasets, datacube.api.query.query_group_by())
            # pq_sources = datacube.Datacube.group_datasets(pq_datasets, datacube.api.query.query_group_by())
            # sources, pq_sources = xarray.align(sources, pq_sources)

            pq_data = tiler.data(pq_datasets, mask=True)

            mask = make_mask(pq_data, **style.pq_mask_flags)
            mask_data = mask.pixelquality
            masks.append(mask_data)

        if data:
            body = _write_png(data, style, extent_mask, *masks)
        else:
            body = _write_empty(geobox)
    release_cube(dc)
    return body, 200, resp_headers({"Content-Type": "image/png"})
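
The per-band nodata test in Example #5 has to be ANDed across bands (fixed in place above). Factored out with functools.reduce, the accumulation might read as follows (combined_extent_mask is a hypothetical helper; bands is assumed non-empty):

from functools import reduce

def combined_extent_mask(data, bands):
    # True wherever every requested band holds real (non-nodata) data.
    bands = list(bands)
    first = data[bands[0]] != data[bands[0]].attrs['nodata']
    return reduce(lambda acc, b: acc & (data[b] != data[b].attrs['nodata']),
                  bands[1:], first)
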
Example #6
def feature_info(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetFeatureInfoParameters(args)

    # Prepare to extract feature info
    stacker = DataStacker(params.product, params.geobox, params.time)
    feature_json = {}

    # --- Begin code section requiring datacube.
    service_cfg = get_service_cfg()
    dc = get_cube()
    try:
        geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j)
        datasets = stacker.datasets(dc.index, all_time=True, point=geo_point)
        pq_datasets = stacker.datasets(dc.index,
                                       mask=True,
                                       all_time=False,
                                       point=geo_point)

        h_coord = service_cfg.published_CRSs[params.crsid]["horizontal_coord"]
        v_coord = service_cfg.published_CRSs[params.crsid]["vertical_coord"]
        isel_kwargs = {h_coord: [params.i], v_coord: [params.j]}
        if not datasets:
            pass
        else:
            available_dates = set()
            drill = {}
            for d in datasets:
                idx_date = (d.center_time +
                            timedelta(hours=params.product.time_zone)).date()
                available_dates.add(idx_date)
                pixel_ds = None
                if idx_date == params.time and "lon" not in feature_json:
                    data = stacker.data([d], skip_corrections=True)

                    # Use i,j image coordinates to extract data pixel from dataset, and
                    # convert to lat/long geographic coordinates
                    if service_cfg.published_CRSs[params.crsid]["geographic"]:
                        # Geographic coordinate systems (e.g. EPSG:4326/WGS-84) are already in lat/long
                        feature_json["lat"] = data.latitude[params.j].item()
                        feature_json["lon"] = data.longitude[params.i].item()
                        pixel_ds = data.isel(**isel_kwargs)
                    else:
                        # Non-geographic coordinate systems need to be projected onto a geographic
                        # coordinate system.  Why not use EPSG:4326?
                        # Extract coordinates in CRS
                        data_x = getattr(data, h_coord)
                        data_y = getattr(data, v_coord)

                        x = data_x[params.i].item()
                        y = data_y[params.j].item()
                        pt = geometry.point(x, y, params.crs)

                        # Project to EPSG:4326
                        crs_geo = geometry.CRS("EPSG:4326")
                        ptg = pt.to_crs(crs_geo)

                        # Capture lat/long coordinates
                        feature_json["lon"], feature_json["lat"] = ptg.coords[
                            0]

                    # Extract data pixel
                    pixel_ds = data.isel(**isel_kwargs)

                    # Get accurate timestamp from dataset
                    feature_json["time"] = d.center_time.strftime(
                        "%Y-%m-%d %H:%M:%S UTC")

                    feature_json["bands"] = {}
                    # Collect raw band values for pixel
                    for band in stacker.needed_bands():
                        ret_val = band_val = pixel_ds[band].item()
                        if band_val == pixel_ds[band].nodata:
                            feature_json["bands"][band] = "n/a"
                        else:
                            if hasattr(pixel_ds[band], 'flags_definition'):
                                flag_def = pixel_ds[band].flags_definition
                                flag_dict = mask_to_dict(flag_def, band_val)
                                ret_val = [
                                    flag_def[k]['description']
                                    for k in filter(flag_dict.get, flag_dict)
                                ]
                            feature_json["bands"][band] = ret_val

                    for k, v in filter(
                            lambda kv: hasattr(kv[1], 'index_function'),
                            params.product.style_index.items()):
                        if v.index_function is None:
                            continue

                        vals_nodata = [
                            pixel_ds[b] == pixel_ds[b].nodata
                            for b in v.needed_bands
                        ]
                        if any(vals_nodata):
                            continue

                        value = v.index_function(pixel_ds).item()
                        feature_json.setdefault("band_derived", {})[k] = value

                if params.product.band_drill:
                    if pixel_ds is None:
                        data = stacker.data([d], skip_corrections=True)
                        pixel_ds = data.isel(**isel_kwargs)
                    drill_section = {}
                    for band in params.product.band_drill:
                        band_val = pixel_ds[band].item()
                        if band_val == pixel_ds[band].nodata:
                            drill_section[band] = "n/a"
                        else:
                            drill_section[band] = pixel_ds[band].item()
                    drill[idx_date.strftime("%Y-%m-%d")] = drill_section
            if drill:
                feature_json["time_drill"] = drill
                feature_json["datasets_read"] = len(datasets)
            my_flags = 0
            pqdi = -1
            for pqd in pq_datasets:
                pqdi += 1
                idx_date = (pqd.center_time +
                            timedelta(hours=params.product.time_zone)).date()
                if idx_date == params.time:
                    pq_data = stacker.data([pqd], mask=True)
                    pq_pixel_ds = pq_data.isel(**isel_kwargs)
                    # PQ flags
                    m = params.product.pq_product.measurements[
                        params.product.pq_band]
                    flags = pq_pixel_ds[params.product.pq_band].item()
                    if not flags & ~params.product.info_mask:
                        my_flags = my_flags | flags
                    else:
                        continue
                    feature_json["flags"] = {}
                    for mk, mv in m["flags_definition"].items():
                        if mk in params.product.ignore_flags_info:
                            continue
                        bits = mv["bits"]
                        values = mv["values"]
                        if not isinstance(bits, int):
                            continue
                        flag = 1 << bits
                        if my_flags & flag:
                            val = values['1']
                        else:
                            val = values['0']
                        feature_json["flags"][mk] = val

            lads = list(available_dates)
            lads.sort()
            feature_json["data_available_for_dates"] = [
                d.strftime("%Y-%m-%d") for d in lads
            ]
            feature_json["data_links"] = sorted(get_s3_browser_uris(datasets))
    finally:
        release_cube(dc)
    # --- End code section requiring datacube.

    result = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": feature_json
        }]
    }
    return json.dumps(result), 200, resp_headers(
        {"Content-Type": "application/json"})
Example #7
def get_map(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetMapParameters(args)

    dc = get_cube()
    # Tiling.
    stacker = DataStacker(params.product,
                          params.geobox,
                          params.time,
                          style=params.style)
    try:
        datasets = stacker.datasets(dc.index)
        zoomed_out = params.zf < params.product.min_zoom
        too_many_datasets = (params.product.max_datasets_wms > 0 and
                             len(datasets) > params.product.max_datasets_wms)
        if not datasets:
            body = _write_empty(params.geobox)
        elif zoomed_out or too_many_datasets:
            # Zoomed out too far to properly render data.
            # Construct a polygon which is the union of the extents of the matching datasets.
            extent = None
            extent_crs = None
            for ds in datasets:
                if extent:
                    new_extent = bbox_to_geom(ds.extent.boundingbox,
                                              ds.extent.crs)
                    if new_extent.crs != extent_crs:
                        new_extent = new_extent.to_crs(extent_crs)
                    extent = extent.union(new_extent)
                else:
                    extent = bbox_to_geom(ds.extent.boundingbox, ds.extent.crs)
                    extent_crs = extent.crs
            extent = extent.to_crs(params.crs)
            body = _write_polygon(params.geobox, extent,
                                  params.product.zoom_fill)
        else:
            _LOG.debug("load start %s %s",
                       datetime.now().time(), args["requestid"])
            data = stacker.data(datasets,
                                manual_merge=params.product.data_manual_merge,
                                use_overviews=True)
            _LOG.debug("load stop %s %s",
                       datetime.now().time(), args["requestid"])
            if params.style.masks:
                if params.product.pq_name == params.product.name:
                    pq_band_data = (
                        data[params.product.pq_band].dims,
                        data[params.product.pq_band].astype("uint16"))
                    pq_data = xarray.Dataset(
                        {params.product.pq_band: pq_band_data},
                        coords=data[params.product.pq_band].coords)
                    flag_def = data[params.product.pq_band].flags_definition
                    pq_data[params.product.pq_band].attrs["flags_definition"] = flag_def
                else:
                    pq_datasets = stacker.datasets(dc.index, mask=True)
                    if pq_datasets:
                        pq_data = stacker.data(
                            pq_datasets,
                            mask=True,
                            manual_merge=params.product.pq_manual_merge,
                            use_overviews=True)
                    else:
                        pq_data = None
            else:
                pq_data = None
            extent_mask = None
            if not params.product.data_manual_merge:
                for band in params.style.needed_bands:
                    for f in params.product.extent_mask_func:
                        if extent_mask is None:
                            extent_mask = f(data, band)
                        else:
                            extent_mask &= f(data, band)

            if data is not None:
                body = _write_png(data, pq_data, params.style, extent_mask)
            else:
                body = _write_empty(params.geobox)
    finally:
        release_cube(dc)
    return body, 200, resp_headers({"Content-Type": "image/png"})
Example #8
def get_map(args):
    # Parse GET parameters
    params = GetMapParameters(args)

    # Tiling.
    stacker = DataStacker(params.product,
                          params.geobox,
                          params.time,
                          style=params.style)
    dc = get_cube()
    try:
        datasets = stacker.datasets(dc.index)
        if not datasets:
            body = _write_empty(params.geobox)
        elif params.zf < params.product.min_zoom or (
                params.product.max_datasets_wms > 0
                and len(datasets) > params.product.max_datasets_wms):
            # Zoomed out too far to properly render data.
            # Construct a polygon which is the union of the extents of the matching datasets.
            extent = None
            extent_crs = None
            for ds in datasets:
                if extent:
                    new_extent = ds.extent
                    if new_extent.crs != extent_crs:
                        new_extent = new_extent.to_crs(extent_crs)
                    extent = extent.union(new_extent)
                else:
                    extent = ds.extent
                    extent_crs = extent.crs
            extent = extent.to_crs(params.crs)

            body = _write_polygon(params.geobox, extent,
                                  params.product.zoom_fill)
        else:
            data = stacker.data(datasets,
                                manual_merge=params.product.data_manual_merge)
            if params.style.masks:
                if params.product.pq_name == params.product.name:
                    pq_data = xarray.Dataset(
                        {
                            params.product.pq_band:
                            (data[params.product.pq_band].dims,
                             data[params.product.pq_band].astype("uint16"))
                        },
                        coords=data[params.product.pq_band].coords)
                    flag_def = data[params.product.pq_band].flags_definition
                    pq_data[params.product.pq_band].attrs["flags_definition"] = flag_def
                else:
                    pq_datasets = stacker.datasets(dc.index, mask=True)
                    if pq_datasets:
                        pq_data = stacker.data(
                            pq_datasets,
                            mask=True,
                            manual_merge=params.product.pq_manual_merge)
                    else:
                        pq_data = None
            else:
                pq_data = None
            extent_mask = None
            if not params.product.data_manual_merge:
                for band in params.style.needed_bands:
                    for f in params.product.extent_mask_func:
                        if extent_mask is None:
                            extent_mask = f(data, band)
                        else:
                            extent_mask &= f(data, band)

            if data is not None:
                body = _write_png(data, pq_data, params.style, extent_mask)
            else:
                body = _write_empty(params.geobox)
    finally:
        release_cube(dc)
    return body, 200, resp_headers({"Content-Type": "image/png"})
Example #9
def get_map(args):
    # Version parameter
    # GetMap 1.1.1 must be supported for Terria
    version = get_arg(args,
                      "version",
                      "WMS version",
                      permitted_values=["1.1.1", "1.3.0"])

    # CRS parameter
    if version == "1.1.1":
        crs_arg = "srs"
    else:
        crs_arg = "crs"
    crsid = get_arg(args,
                    crs_arg,
                    "Coordinate Reference System",
                    errcode=WMSException.INVALID_CRS,
                    permitted_values=service_cfg["published_CRSs"].keys())
    crs = geometry.CRS(crsid)

    # Layers and Styles parameters
    product = get_product_from_arg(args)
    styles = args.get("styles", "").split(",")
    if len(styles) != 1:
        raise WMSException("Multi-layer GetMap requests not supported")
    style_r = styles[0]
    if not style_r:
        style_r = product.default_style
    style = product.style_index.get(style_r)
    if not style:
        raise WMSException("Style %s is not defined" % style_r,
                           WMSException.STYLE_NOT_DEFINED,
                           locator="Style parameter")

    # Format parameter
    fmt = get_arg(args,
                  "format",
                  "image format",
                  errcode=WMSException.INVALID_FORMAT,
                  lower=True,
                  permitted_values=["image/png"])

    # BBox, height and width parameters
    geobox = _get_geobox(args, crs)

    # Zoom Factor
    zf = zoom_factor(args, crs)

    # Time parameter
    time = get_time(args, product)

    # Tiling.
    tiler = RGBTileGenerator(product, geobox, time, style=style)
    dc = get_cube()
    try:
        datasets = tiler.datasets(dc.index)
        if not datasets:
            body = _write_empty(geobox)
        elif zf < product.min_zoom:
            # Zoomed out too far to properly render data.
            # Construct a polygon which is the union of the extents of the matching datasets.
            extent = None
            extent_crs = None
            for ds in datasets:
                if extent:
                    new_extent = ds.extent
                    if new_extent.crs != extent_crs:
                        new_extent = new_extent.to_crs(extent_crs)
                    extent = extent.union(new_extent)
                else:
                    extent = ds.extent
                    extent_crs = extent.crs
            extent = extent.to_crs(geobox.crs)

            body = _write_polygon(geobox, extent, product.zoom_fill)
        else:
            data = tiler.data(datasets, manual_merge=product.data_manual_merge)
            if style.masks:
                if product.pq_name == product.name:
                    pq_data = xarray.Dataset(
                        {
                            product.pq_band:
                            (data[product.pq_band].dims,
                             data[product.pq_band].astype("uint16"))
                        },
                        coords=data[product.pq_band].coords)
                    flag_def = data[product.pq_band].flags_definition
                    pq_data[product.pq_band].attrs["flags_definition"] = flag_def
                else:
                    pq_datasets = tiler.datasets(dc.index, mask=True)
                    if pq_datasets:
                        pq_data = tiler.data(pq_datasets,
                                             mask=True,
                                             manual_merge=product.pq_manual_merge)
                    else:
                        pq_data = None
            else:
                pq_data = None
            extent_mask = None
            for band in style.needed_bands:
                # Accumulate the nodata mask across all needed bands.
                band_mask = product.extent_mask_func(data, band)
                extent_mask = band_mask if extent_mask is None else (extent_mask & band_mask)

            if data:
                body = _write_png(data, pq_data, style, extent_mask)
            else:
                body = _write_empty(geobox)
    finally:
        release_cube(dc)
    return body, 200, resp_headers({"Content-Type": "image/png"})
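
Examples #5 and #7-#9 all build the zoomed-out placeholder polygon by unioning dataset footprints in a loop. The same fold written with functools.reduce (a sketch that, unlike the CRS-normalising loops in Examples #7-#9, assumes a non-empty dataset list whose extents already share one CRS):

from functools import reduce

def union_extent(datasets, target_crs):
    # Union all dataset footprints, then project to the output CRS.
    extent = reduce(lambda acc, ds: acc.union(ds.extent),
                    datasets[1:], datasets[0].extent)
    return extent.to_crs(target_crs)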