Example #1
def legend(layer, style):
    platforms = get_layers()
    product = platforms.product_index.get(layer)
    if not product:
        return ("Unknown Layer", 404,
                resp_headers({"Content-Type": "text/plain"}))
    img = create_legend_for_style(product, style)
    if not img:
        return ("Unknown Style", 404,
                resp_headers({"Content-Type": "text/plain"}))
    return img
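
Every handler on this page returns a Flask-style (body, status, headers) tuple, with resp_headers building the header dict. The helper itself is not shown here; a minimal sketch of what it might do, assuming it simply overlays the per-response headers onto service-wide defaults (the real implementation may differ):

def resp_headers(d):
    # Hypothetical defaults; the actual service-wide headers would come from config.
    headers = {"Access-Control-Allow-Origin": "*"}
    headers.update(d)
    return headers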
Example #2
def desc_coverages(args):
    # Note: Only WCS v1.0.0 is fully supported at this stage, so no version negotiation is necessary
    # Extract layer metadata from Datacube.
    platforms = get_layers(refresh=True)

    coverages = args.get("coverage")
    products = []
    if coverages:
        coverages = coverages.split(",")
        for c in coverages:
            p = platforms.product_index.get(c)
            if p:
                products.append(p)
            else:
                raise WCS1Exception("Invalid coverage: %s" % c,
                                    WCS1Exception.COVERAGE_NOT_DEFINED,
                                    locator="Coverage parameter")
    else:
        for plat in platforms:
            for p in plat.products:
                products.append(p)

    return (render_template("wcs_desc_coverage.xml",
                            service=get_service_cfg(),
                            products=products), 200,
            resp_headers({
                "Content-Type": "application/xml",
                "Cache-Control": "max-age=10"
            }))
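
desc_coverages takes the raw KVP argument dict, so describing several coverages in one request is just a comma-separated list (the coverage names below are illustrative):

xml, status, headers = desc_coverages({"coverage": "ls8_nbar_albers,ls7_nbar_albers"})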
Example #3
def get_capabilities(args):
    # TODO: Handle updatesequence request parameter for cache consistency.
    # Note: Only WMTS v1.0.0 exists at this stage, so no version negotiation is necessary
    # Extract layer metadata from Datacube.
    platforms = get_layers(refresh=True)
    service_cfg = get_service_cfg()
    url = args.get('Host', args['url_root'])
    base_url = get_service_base_url(service_cfg.allowed_urls, url)
    section = args.get("section")
    if section:
        section = section.lower()
    show_service_id = False
    show_service_provider = False
    show_ops_metadata = False
    show_contents = False
    show_themes = False
    if section is None:
        show_service_id = True
        show_service_provider = True
        show_ops_metadata = True
        show_contents = True
        show_themes = True
    else:
        sections = section.split(",")
        for s in sections:
            if s == "all":
                show_service_id = True
                show_service_provider = True
                show_ops_metadata = True
                show_contents = True
                show_themes = True
            elif s == "serviceidentification":
                show_service_id = True
            elif s == "serviceprovider":
                show_service_provider = True
            elif s == "operationsmetadata":
                show_ops_metadata = True
            elif s == "contents":
                show_contents = True
            elif s == "themes":
                show_themes = True
            else:
                raise WMTSException("Invalid section: %s" % section,
                                    WMTSException.INVALID_PARAMETER_VALUE,
                                    locator="Section parameter")
    return (render_template("wmts_capabilities.xml",
                            service=service_cfg,
                            platforms=platforms,
                            base_url=base_url,
                            show_service_id=show_service_id,
                            show_service_provider=show_service_provider,
                            show_ops_metadata=show_ops_metadata,
                            show_contents=show_contents,
                            show_themes=show_themes,
                            webmerc_ss=WebMercScaleSet), 200,
            resp_headers({
                "Content-Type": "application/xml",
                "Cache-Control": "no-cache,max-age=0"
            }))
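
The chain of elif branches above just maps each SECTION token onto one or more boolean flags. The same parse can be written table-driven; a behavior-equivalent sketch (a hypothetical refactor, not part of the source):

SECTION_FLAGS = {
    "serviceidentification": "show_service_id",
    "serviceprovider": "show_service_provider",
    "operationsmetadata": "show_ops_metadata",
    "contents": "show_contents",
    "themes": "show_themes",
}

def parse_sections(section):
    # Return the set of flag names to enable, rejecting unknown tokens.
    if section is None:
        return set(SECTION_FLAGS.values())
    enabled = set()
    for s in section.lower().split(","):
        if s == "all":
            enabled.update(SECTION_FLAGS.values())
        elif s in SECTION_FLAGS:
            enabled.add(SECTION_FLAGS[s])
        else:
            raise ValueError("Invalid section: %s" % s)  # WMTSException in the original
    return enabled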
Example #4
def exception_response(self, traceback=None):
    # Avoid the mutable-default-argument pitfall: default to a fresh list.
    if traceback is None:
        traceback = []
    return (render_template("ogc_error.xml",
                            exception=self,
                            traceback=traceback,
                            version=self.version,
                            schema_url=self.schema_url),
            self.http_response,
            resp_headers({"Content-Type": "application/xml"}))
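
A typical call site catches the OGC exception raised by a handler and hands this tuple straight back to Flask; a hedged sketch (the WMSException name and the surrounding dispatch are assumptions, not taken from this page):

try:
    return get_map(args)
except WMSException as e:
    # exception_response already yields a Flask-compatible (body, status, headers) tuple.
    return e.exception_response()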
Example #5
def ping():
    db_ok = False
    try:
        with cube() as dc:
            conn = dc.index._db._engine.connect()  # pylint: disable=protected-access
            results = conn.execute(
                "SELECT COUNT(*) FROM agdc.dataset_type"
            )
            for r in results:
                db_ok = True
    except Exception:
        pass
    if db_ok:
        return (render_template("ping.html", status="Up"), 200,
                resp_headers({"Content-Type": "text/html"}))
    else:
        return (render_template("ping.html", status="Down"), 500,
                resp_headers({"Content-Type": "text/html"}))
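
Since SELECT COUNT(*) always yields exactly one row, iterating over the result merely detects that the query succeeded; the count itself is discarded. An equivalent, slightly more direct sketch (same assumption about the private engine handle):

db_ok = False
try:
    with cube() as dc:
        conn = dc.index._db._engine.connect()  # pylint: disable=protected-access
        conn.execute("SELECT COUNT(*) FROM agdc.dataset_type").fetchone()
        db_ok = True
except Exception:
    pass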
Example #6
def get_coverage(args):
    # Note: Only WCS v1.0.0 is fully supported at this stage, so no version negotiation is necessary
    req = WCS1GetCoverageRequest(args)
    data = get_coverage_data(req)
    return (req.format["renderer"](req, data), 200,
            resp_headers({
                "Content-Type": req.format["mime"],
                "Content-Disposition": "attachment; filename=%s.%s" % (
                    req.product_name, req.format["extension"])
            }))
Example #7
def get_capabilities(args):
    # TODO: Handle updatesequence request parameter for cache consistency.
    # Note: Only WMS v1.3.0 is fully supported at this stage, so no version negotiation is necessary
    # Extract layer metadata from Datacube.
    platforms = get_layers(refresh=True)
    return render_template("wms_capabilities.xml",
                           service=get_service_cfg(),
                           platforms=platforms), 200, resp_headers({
                               "Content-Type":
                               "application/xml",
                               "Cache-Control":
                               "no-cache,max-age=0"
                           })
Example #8
def get_capabilities(args):
    encoder = CapabilitiesEncoder()

    return (
        encoder.serialize(
            encoder.encode_capabilities(args.get('sections', 'all'), DummyConf())
        ),
        200,
        resp_headers({
            "Content-Type": "application/xml",
            "Cache-Control": "no-cache, max-age=0"
        })
    )
Example #9
def get_capabilities(args):
    # TODO: Handle updatesequence request parameter for cache consistency.
    # Note: Only WMS v1.3.0 is fully supported at this stage, so no version negotiation is necessary
    # Extract layer metadata from Datacube.
    platforms = get_layers(refresh=True)
    service_cfg = get_service_cfg()
    url = args.get('Host', args['url_root'])
    base_url = get_service_base_url(service_cfg.allowed_urls, url)
    return (render_template("wms_capabilities.xml",
                            service=service_cfg,
                            platforms=platforms,
                            base_url=base_url), 200,
            resp_headers({
                "Content-Type": "application/xml",
                "Cache-Control": "max-age=10"
            }))
Example #10
def get_capabilities(args):
    # TODO: Handle updatesequence request parameter for cache consistency.
    # Note: Only WCS v1.0.0 is fully supported at this stage, so no version negotiation is necessary
    section = args.get("section")
    if section:
        section = section.lower()
    show_service = False
    show_capability = False
    show_content_metadata = False
    if section is None or section == "/":
        show_service = True
        show_capability = True
        show_content_metadata = True
    elif section == "/wcs_capabilities/service":
        show_service = True
    elif section == "/wcs_capabilities/capability":
        show_capability = True
    elif section == "/wcs_capabilities/contentmetadata":
        show_content_metadata = True
    else:
        raise WCS1Exception("Invalid section: %s" % section,
                            WCS1Exception.INVALID_PARAMETER_VALUE,
                            locator="Section parameter")

    # Extract layer metadata from Datacube.
    platforms = get_layers(refresh=True)
    service_cfg = get_service_cfg()
    url = args.get('Host', args['url_root'])
    base_url = get_service_base_url(service_cfg.allowed_urls, url)
    return (
        render_template("wcs_capabilities.xml",
                        show_service=show_service,
                        show_capability=show_capability,
                        show_content_metadata=show_content_metadata,
                        service=service_cfg,
                        platforms=platforms,
                        base_url=base_url),
        200,
        resp_headers({
            "Content-Type": "application/xml",
            "Cache-Control": "no-cache, max-age=0"
        }))
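
Unlike the flat WMTS section names in Example #3, WCS 1.0 section values are XPath-like paths, lower-cased before comparison. A request for just the Service block might look like this (the url_root value is illustrative):

xml, status, headers = get_capabilities({
    "section": "/WCS_Capabilities/Service",
    "url_root": "http://localhost:5000/",
})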
Example #11
def get_coverage(args):
    decoder = WCS20GetCoverageKVPDecoder(args)

    try:
        coverage_id = decoder.coverage_id
    except KeyError:
        raise WCS2Exception("Missing coverageid parameter", locator="coverageid")

    svc_cfg = get_service_cfg(refresh=True)
    layers = get_layers(refresh=True)

    product = layers.product_index.get(coverage_id)

    if not product:
        raise WCS2Exception("Invalid coverage: %s" % coverage_id,
                            WCS2Exception.COVERAGE_NOT_DEFINED,
                            locator="COVERAGE parameter")

    if decoder.format:
        if decoder.format not in svc_cfg.wcs_formats:
            raise WCS2Exception("Unsupported format: %s" % decoder.format,
                                WCS2Exception.INVALID_PARAMETER_VALUE,
                                locator="FORMAT parameter")
    elif not svc_cfg.native_wcs_format:
        raise WCS2Exception("Missing parameter format 'format'",
                            WCS2Exception.MISSING_PARAMETER_VALUE,
                            locator="FORMAT parameter")

    fmt_cfg = svc_cfg.wcs_formats[decoder.format or svc_cfg.native_wcs_format]
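
    # fmt_cfg is selected above but never used: as excerpted, the handler
    # returns an empty body, so the example reads as a stub. A complete
    # implementation would pass the loaded coverage through fmt_cfg's
    # renderer, as Example #6 does for WCS 1.0.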
    return (
        '',
        200,
        resp_headers({
            "Content-Type": "application/xml",
            "Cache-Control": "no-cache, max-age=0"
        })
    )
Example #12
def desc_coverages(args):
    try:
        coverage_ids = [s.strip() for s in args['coverageid'].split(',')]
    except KeyError:
        raise WCS2Exception("Missing coverageid parameter", locator="coverageid")

    svc_cfg = get_service_cfg(refresh=True)
    layers = get_layers(refresh=True)

    products = []
    for coverage_id in coverage_ids:
        product = layers.product_index.get(coverage_id)
        if product:
            products.append(product)
        else:
            raise WCS2Exception("Invalid coverage: %s" % coverage_id,
                                WCS2Exception.COVERAGE_NOT_DEFINED,
                                locator="Coverage parameter")

    # Build a coverage object from each of the matched products.

    coverages = [
        get_coverage_object(svc_cfg, product)
        for product in products
    ]

    encoder = WCS21XMLEncoder()
    return (
        encoder.serialize(
            encoder.encode_coverage_descriptions(coverages)
        ),
        200,
        resp_headers({
            "Content-Type": "application/xml",
            "Cache-Control": "no-cache, max-age=0"
        })
    )
Example #13
def feature_info(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetFeatureInfoParameters(args)
    feature_json = {}

    geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j)
    # shrink geobox to point
    # Prepare to extract feature info
    if geobox_is_point(params.geobox):
        geo_point_geobox = params.geobox
    else:
        geo_point_geobox = datacube.utils.geometry.GeoBox.from_geopolygon(
            geo_point, params.geobox.resolution, crs=params.geobox.crs)
    stacker = DataStacker(params.product, geo_point_geobox, params.time)

    # --- Begin code section requiring datacube.
    service_cfg = get_service_cfg()
    with cube() as dc:
        datasets = stacker.datasets(dc.index, all_time=True, point=geo_point)
        pq_datasets = stacker.datasets(dc.index,
                                       mask=True,
                                       all_time=False,
                                       point=geo_point)

        # Taking the data as a single point so our indexes into the data should be 0,0
        h_coord = service_cfg.published_CRSs[params.crsid]["horizontal_coord"]
        v_coord = service_cfg.published_CRSs[params.crsid]["vertical_coord"]
        s3_bucket = service_cfg.s3_bucket
        s3_url = service_cfg.s3_url
        isel_kwargs = {h_coord: 0, v_coord: 0}
        if datasets:
            dataset_date_index = {}
            tz = None
            for ds in datasets:
                if tz is None:
                    crs_geo = geometry.CRS("EPSG:4326")
                    ptg = geo_point.to_crs(crs_geo)
                    tz = tz_for_coord(ptg.coords[0][0], ptg.coords[0][1])
                ld = local_date(ds, tz=tz)
                if ld in dataset_date_index:
                    dataset_date_index[ld].append(ds)
                else:
                    dataset_date_index[ld] = [ds]
            # Group datasets by time, load only datasets that match the idx_date
            available_dates = dataset_date_index.keys()
            ds_at_time = dataset_date_index.get(params.time, [])
            _LOG.info("%d datasets, %d at target date", len(datasets),
                      len(ds_at_time))
            if len(ds_at_time) > 0:
                pixel_ds = None
                data = stacker.data(
                    ds_at_time,
                    skip_corrections=True,
                    manual_merge=params.product.data_manual_merge,
                    fuse_func=params.product.fuse_func)

                # Non-geographic coordinate systems need to be projected onto a
                # geographic CRS (EPSG:4326 below) before reporting lat/long.
                # Extract coordinates in the native CRS first.
                data_x = getattr(data, h_coord)
                data_y = getattr(data, v_coord)

                x = data_x[isel_kwargs[h_coord]].item()
                y = data_y[isel_kwargs[v_coord]].item()
                pt = geometry.point(x, y, params.crs)

                if params.product.multi_product:
                    feature_json["source_product"] = "%s (%s)" % (
                        ds_at_time[0].type.name,
                        ds_at_time[0].metadata_doc["platform"]["code"])

                # Project to EPSG:4326
                crs_geo = geometry.CRS("EPSG:4326")
                ptg = pt.to_crs(crs_geo)

                # Capture lat/long coordinates
                feature_json["lon"], feature_json["lat"] = ptg.coords[0]

                # Extract data pixel
                pixel_ds = data.isel(**isel_kwargs)

                # Get accurate timestamp from dataset
                feature_json["time"] = dataset_center_time(
                    ds_at_time[0]).strftime("%Y-%m-%d %H:%M:%S UTC")

                # Collect raw band values for pixel and derived bands from styles
                feature_json["bands"] = _make_band_dict(
                    params.product, pixel_ds, stacker.needed_bands())
                derived_band_dict = _make_derived_band_dict(
                    pixel_ds, params.product.style_index)
                if derived_band_dict:
                    feature_json["band_derived"] = derived_band_dict
                if callable(params.product.feature_info_include_custom):
                    additional_data = params.product.feature_info_include_custom(
                        feature_json["bands"])
                    feature_json.update(additional_data)

            my_flags = 0
            for pqd in pq_datasets:
                idx_date = dataset_center_time(pqd)
                if idx_date == params.time:
                    pq_data = stacker.data([pqd], mask=True)
                    pq_pixel_ds = pq_data.isel(**isel_kwargs)
                    # PQ flags
                    m = params.product.pq_product.measurements[
                        params.product.pq_band]
                    flags = pq_pixel_ds[params.product.pq_band].item()
                    if not flags & ~params.product.info_mask:
                        my_flags = my_flags | flags
                    else:
                        continue
                    feature_json["flags"] = {}
                    for mk, mv in m["flags_definition"].items():
                        if mk in params.product.ignore_flags_info:
                            continue
                        bits = mv["bits"]
                        values = mv["values"]
                        if not isinstance(bits, int):
                            continue
                        flag = 1 << bits
                        if my_flags & flag:
                            val = values['1']
                        else:
                            val = values['0']
                        feature_json["flags"][mk] = val

            feature_json["data_available_for_dates"] = [
                d.strftime("%Y-%m-%d") for d in sorted(available_dates)
            ]
            feature_json["data_links"] = sorted(
                get_s3_browser_uris(ds_at_time, s3_url, s3_bucket))
            if params.product.feature_info_include_utc_dates:
                feature_json["data_available_for_utc_dates"] = sorted(
                    d.center_time.strftime("%Y-%m-%d") for d in datasets)
    # --- End code section requiring datacube.

    result = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": feature_json
        }]
    }
    return json.dumps(result), 200, resp_headers(
        {"Content-Type": "application/json"})
Example #14
def get_map(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetMapParameters(args)

    with cube() as dc:
        # Tiling.
        stacker = DataStacker(params.product,
                              params.geobox,
                              params.time,
                              params.resampling,
                              style=params.style)
        datasets = stacker.datasets(dc.index)
        zoomed_out = params.zf < params.product.min_zoom
        too_many_datasets = (params.product.max_datasets_wms > 0 and
                             len(datasets) > params.product.max_datasets_wms)
        if not datasets:
            body = _write_empty(params.geobox)
        elif too_many_datasets:
            body = _write_polygon(params.geobox, params.geobox.extent,
                                  params.product.zoom_fill)
        elif zoomed_out:
            # Zoomed out too far to properly render data.
            # Construct a polygon which is the union of the extents of the matching datasets.
            extent = None
            extent_crs = None
            for ds in datasets:
                if extent:
                    new_extent = bbox_to_geom(ds.extent.boundingbox,
                                              ds.extent.crs)
                    if new_extent.crs != extent_crs:
                        new_extent = new_extent.to_crs(extent_crs)
                    extent = extent.union(new_extent)
                else:
                    extent = bbox_to_geom(ds.extent.boundingbox, ds.extent.crs)
                    extent_crs = extent.crs
            extent = extent.to_crs(params.crs)
            body = _write_polygon(params.geobox, extent,
                                  params.product.zoom_fill)
        else:
            _LOG.debug("load start %s %s",
                       datetime.now().time(), args["requestid"])
            data = stacker.data(datasets,
                                manual_merge=params.product.data_manual_merge,
                                fuse_func=params.product.fuse_func)
            _LOG.debug("load stop %s %s",
                       datetime.now().time(), args["requestid"])
            if params.style.masks:
                if params.product.pq_name == params.product.name:
                    pq_band_data = (
                        data[params.product.pq_band].dims,
                        data[params.product.pq_band].astype("uint16"))
                    pq_data = xarray.Dataset(
                        {params.product.pq_band: pq_band_data},
                        coords=data[params.product.pq_band].coords)
                    flag_def = data[params.product.pq_band].flags_definition
                    pq_data[params.product.pq_band].attrs["flags_definition"] = flag_def
                else:
                    pq_datasets = stacker.datasets(
                        dc.index,
                        mask=True,
                        all_time=params.product.pq_ignore_time)
                    if pq_datasets:
                        pq_data = stacker.data(
                            pq_datasets,
                            mask=True,
                            manual_merge=params.product.pq_manual_merge,
                            fuse_func=params.product.pq_fuse_func)
                    else:
                        pq_data = None
            else:
                pq_data = None
            extent_mask = None
            if not params.product.data_manual_merge:
                for band in params.style.needed_bands:
                    for f in params.product.extent_mask_func:
                        if extent_mask is None:
                            extent_mask = f(data, band)
                        else:
                            extent_mask &= f(data, band)

            if data is None or (params.style.masks and pq_data is None):
                body = _write_empty(params.geobox)
            else:
                body = _write_png(data, pq_data, params.style, extent_mask)

    return body, 200, resp_headers({"Content-Type": "image/png"})
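
The zoomed-out branch folds every dataset footprint into one polygon before reprojecting to the request CRS. Pulled out as a helper, the same logic reads roughly as follows (a hypothetical refactor, reusing bbox_to_geom from the example):

def _union_extents(datasets, target_crs):
    # Union all footprints in the CRS of the first dataset seen, then
    # reproject the combined polygon into the request CRS.
    extent = None
    for ds in datasets:
        geom = bbox_to_geom(ds.extent.boundingbox, ds.extent.crs)
        if extent is None:
            extent = geom
        elif geom.crs != extent.crs:
            extent = extent.union(geom.to_crs(extent.crs))
        else:
            extent = extent.union(geom)
    return extent.to_crs(target_crs) if extent is not None else None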
Example #15
def feature_info(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetFeatureInfoParameters(args)

    # Prepare to extract feature info
    stacker = DataStacker(params.product, params.geobox, params.time)
    feature_json = {}

    # --- Begin code section requiring datacube.
    service_cfg = get_service_cfg()
    dc = get_cube()
    try:
        geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j)
        datasets = stacker.datasets(dc.index, all_time=True, point=geo_point)
        pq_datasets = stacker.datasets(dc.index,
                                       mask=True,
                                       all_time=False,
                                       point=geo_point)

        h_coord = service_cfg.published_CRSs[params.crsid]["horizontal_coord"]
        v_coord = service_cfg.published_CRSs[params.crsid]["vertical_coord"]
        isel_kwargs = {h_coord: [params.i], v_coord: [params.j]}
        if not datasets:
            pass
        else:
            available_dates = set()
            drill = {}
            for d in datasets:
                idx_date = (d.center_time +
                            timedelta(hours=params.product.time_zone)).date()
                available_dates.add(idx_date)
                pixel_ds = None
                if idx_date == params.time and "lon" not in feature_json:
                    data = stacker.data([d], skip_corrections=True)

                    # Use i,j image coordinates to extract data pixel from dataset, and
                    # convert to lat/long geographic coordinates
                    if service_cfg.published_CRSs[params.crsid]["geographic"]:
                        # Geographic coordinate systems (e.g. EPSG:4326/WGS-84) are already in lat/long
                        feature_json["lat"] = data.latitude[params.j].item()
                        feature_json["lon"] = data.longitude[params.i].item()
                        pixel_ds = data.isel(**isel_kwargs)
                    else:
                        # Non-geographic coordinate systems need to be projected onto a
                        # geographic CRS (EPSG:4326 below) before reporting lat/long.
                        # Extract coordinates in the native CRS first.
                        data_x = getattr(data, h_coord)
                        data_y = getattr(data, v_coord)

                        x = data_x[params.i].item()
                        y = data_y[params.j].item()
                        pt = geometry.point(x, y, params.crs)

                        # Project to EPSG:4326
                        crs_geo = geometry.CRS("EPSG:4326")
                        ptg = pt.to_crs(crs_geo)

                        # Capture lat/long coordinates
                        feature_json["lon"], feature_json["lat"] = ptg.coords[
                            0]

                    # Extract data pixel
                    pixel_ds = data.isel(**isel_kwargs)

                    # Get accurate timestamp from dataset
                    feature_json["time"] = d.center_time.strftime(
                        "%Y-%m-%d %H:%M:%S UTC")

                    feature_json["bands"] = {}
                    # Collect raw band values for pixel
                    for band in stacker.needed_bands():
                        ret_val = band_val = pixel_ds[band].item()
                        if band_val == pixel_ds[band].nodata:
                            feature_json["bands"][band] = "n/a"
                        else:
                            if hasattr(pixel_ds[band], 'flags_definition'):
                                flag_def = pixel_ds[band].flags_definition
                                flag_dict = mask_to_dict(flag_def, band_val)
                                ret_val = [
                                    flag_def[k]['description']
                                    for k in filter(flag_dict.get, flag_dict)
                                ]
                            feature_json["bands"][band] = ret_val

                    for k, v in filter(
                            lambda kv: hasattr(kv[1], 'index_function'),
                            params.product.style_index.items()):
                        if v.index_function is None:
                            continue

                        vals_nodata = [
                            pixel_ds[b] == pixel_ds[b].nodata
                            for b in v.needed_bands
                        ]
                        if any(vals_nodata):
                            continue

                        value = v.index_function(pixel_ds).item()
                        try:
                            feature_json["band_derived"][k] = value
                        except KeyError:
                            feature_json["band_derived"] = {}
                            feature_json["band_derived"][k] = value

                if params.product.band_drill:
                    if pixel_ds is None:
                        data = stacker.data([d], skip_corrections=True)
                        pixel_ds = data.isel(**isel_kwargs)
                    drill_section = {}
                    for band in params.product.band_drill:
                        band_val = pixel_ds[band].item()
                        if band_val == pixel_ds[band].nodata:
                            drill_section[band] = "n/a"
                        else:
                            drill_section[band] = pixel_ds[band].item()
                    drill[idx_date.strftime("%Y-%m-%d")] = drill_section
            if drill:
                feature_json["time_drill"] = drill
                feature_json["datasets_read"] = len(datasets)
            my_flags = 0
            for pqd in pq_datasets:
                idx_date = (pqd.center_time +
                            timedelta(hours=params.product.time_zone)).date()
                if idx_date == params.time:
                    pq_data = stacker.data([pqd], mask=True)
                    pq_pixel_ds = pq_data.isel(**isel_kwargs)
                    # PQ flags
                    m = params.product.pq_product.measurements[
                        params.product.pq_band]
                    flags = pq_pixel_ds[params.product.pq_band].item()
                    if not flags & ~params.product.info_mask:
                        my_flags = my_flags | flags
                    else:
                        continue
                    feature_json["flags"] = {}
                    for mk, mv in m["flags_definition"].items():
                        if mk in params.product.ignore_flags_info:
                            continue
                        bits = mv["bits"]
                        values = mv["values"]
                        if not isinstance(bits, int):
                            continue
                        flag = 1 << bits
                        if my_flags & flag:
                            val = values['1']
                        else:
                            val = values['0']
                        feature_json["flags"][mk] = val

            feature_json["data_available_for_dates"] = [
                d.strftime("%Y-%m-%d") for d in sorted(available_dates)
            ]
            feature_json["data_links"] = sorted(get_s3_browser_uris(datasets))
    finally:
        release_cube(dc)
    # --- End code section requiring datacube.

    result = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": feature_json
        }]
    }
    return json.dumps(result), 200, resp_headers(
        {"Content-Type": "application/json"})
Example #16
def get_map(args):
    # Parse GET parameters
    params = GetMapParameters(args)

    # Tiling.
    stacker = DataStacker(params.product,
                          params.geobox,
                          params.time,
                          style=params.style)
    dc = get_cube()
    try:
        datasets = stacker.datasets(dc.index)
        if not datasets:
            body = _write_empty(params.geobox)
        elif params.zf < params.product.min_zoom or (
                params.product.max_datasets_wms > 0
                and len(datasets) > params.product.max_datasets_wms):
            # Zoomed out too far to properly render data.
            # Construct a polygon which is the union of the extents of the matching datasets.
            extent = None
            extent_crs = None
            for ds in datasets:
                if extent:
                    new_extent = ds.extent
                    if new_extent.crs != extent_crs:
                        new_extent = new_extent.to_crs(extent_crs)
                    extent = extent.union(new_extent)
                else:
                    extent = ds.extent
                    extent_crs = extent.crs
            extent = extent.to_crs(params.crs)

            body = _write_polygon(params.geobox, extent,
                                  params.product.zoom_fill)
        else:
            data = stacker.data(datasets,
                                manual_merge=params.product.data_manual_merge)
            if params.style.masks:
                if params.product.pq_name == params.product.name:
                    pq_data = xarray.Dataset(
                        {
                            params.product.pq_band:
                            (data[params.product.pq_band].dims,
                             data[params.product.pq_band].astype("uint16"))
                        },
                        coords=data[params.product.pq_band].coords)
                    pq_data[params.product.pq_band].attrs["flags_definition"] = \
                        data[params.product.pq_band].flags_definition
                else:
                    pq_datasets = stacker.datasets(dc.index, mask=True)
                    if pq_datasets:
                        pq_data = stacker.data(
                            pq_datasets,
                            mask=True,
                            manual_merge=params.product.pq_manual_merge)
                    else:
                        pq_data = None
            else:
                pq_data = None
            extent_mask = None
            if not params.product.data_manual_merge:
                for band in params.style.needed_bands:
                    for f in params.product.extent_mask_func:
                        if extent_mask is None:
                            extent_mask = f(data, band)
                        else:
                            extent_mask &= f(data, band)

            if data is not None:
                body = _write_png(data, pq_data, params.style, extent_mask)
            else:
                body = _write_empty(params.geobox)
    finally:
        release_cube(dc)
    return body, 200, resp_headers({"Content-Type": "image/png"})
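
All of these handlers share the same (body, status, headers) return contract, so they slot into a single dispatch endpoint. A minimal sketch of the Flask wiring (the route path and dispatch keys are assumptions, not taken from this page):

from flask import Flask, request

app = Flask(__name__)

@app.route("/wms")
def wms_endpoint():
    args = request.args.to_dict()
    args["url_root"] = request.url_root
    handlers = {
        "getcapabilities": get_capabilities,
        "getmap": get_map,
        "getfeatureinfo": feature_info,
    }
    handler = handlers.get(args.get("request", "").lower(), get_capabilities)
    return handler(args)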