Example #1
    def subsets(
        self,
        space=SpaceRequestType.CENTRAL_SUBSET_FOR_TIMES,
        time=TimeRequestTypes.LAST,
    ):
        ext_times = time.slice(self.layer.ranges["times"])
        search_times = [self.layer.search_times(t) for t in ext_times]
        with cube() as dc:
            if space.needs_full_extent() and not self.full_extent:
                self.full_extent = mv_search(dc.index,
                                             products=self.layer.products,
                                             sel=MVSelectOpts.EXTENT)
            if space.needs_time_extent():
                time_extent = mv_search(
                    dc.index,
                    products=self.layer.products,
                    sel=MVSelectOpts.EXTENT,
                    times=search_times,
                )
            else:
                time_extent = None

            extent = space.subset(time_extent, self.full_extent)

        return extent, ext_times
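Every example here acquires its database handle through the cube() context manager, and several guard against a failed connection by checking the yielded value. As a rough sketch only (this is not the datacube-ows implementation; the constructor arguments and error handling are assumptions), such a context manager could look like this:

from contextlib import contextmanager
from datacube import Datacube

@contextmanager
def cube():
    # Try to open a Datacube handle; yield None when the database is
    # unreachable so callers can test the handle before using it.
    try:
        dc = Datacube(app="ows")
    except Exception:
        dc = None
    try:
        yield dc
    finally:
        # A real implementation would release any pooled connection here.
        pass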
Example #2
def test_count():
    cfg = get_config()
    lyr = list(cfg.product_index.values())[0]
    with cube() as dc:
        count = mv_search_datasets(dc.index, MVSelectOpts.COUNT, layer=lyr)
        ids = mv_search_datasets(dc.index, MVSelectOpts.IDS, layer=lyr)
        assert len(ids) == count
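The MVSelectOpts argument controls what the materialised-view search returns: COUNT gives an integer, IDS a collection of dataset ids, DATASETS the dataset objects themselves (Example #4), and EXTENT a geometry (Example #8). A minimal sketch of the same pattern wrapped in a hypothetical helper:

def summarise_layer(lyr):
    # Count the datasets indexed for an OWS layer and list their ids.
    with cube() as dc:
        count = mv_search_datasets(dc.index, MVSelectOpts.COUNT, layer=lyr)
        ids = mv_search_datasets(dc.index, MVSelectOpts.IDS, layer=lyr)
    return count, sorted(str(i) for i in ids)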
Example #3
    def parse_layers(self, cfg):
        self.layers = []
        self.product_index = {}
        self.native_product_index = {}
        with cube() as dc:
            if dc:
                for lyr_cfg in cfg:
                    self.layers.append(parse_ows_layer(lyr_cfg, self, dc))
Example #4
def test_datasets():
    cfg = get_config()
    lyr = list(cfg.product_index.values())[0]
    with cube() as dc:
        dss = mv_search_datasets(dc.index, MVSelectOpts.DATASETS, layer=lyr)
        ids = mv_search_datasets(dc.index, MVSelectOpts.IDS, layer=lyr)
        assert len(ids) == len(dss)
        for ds in dss:
            assert str(ds.id) in ids
Example #5
def get_coverage_data(req):
    #pylint: disable=too-many-locals, protected-access
    with cube() as dc:
        if not dc:
            raise WCS1Exception("Database connectivity failure")
        stacker = DataStacker(req.product,
                              req.geobox,
                              req.times,
                              bands=req.bands)
        datasets = stacker.datasets(dc.index)
        if not datasets:
            # TODO: Return an empty coverage file with full metadata?
            cfg = get_config()
            x_range = (req.minx, req.maxx)
            y_range = (req.miny, req.maxy)
            xname = cfg.published_CRSs[req.request_crsid]["horizontal_coord"]
            yname = cfg.published_CRSs[req.request_crsid]["vertical_coord"]
            xvals = numpy.linspace(x_range[0], x_range[1], num=req.width)
            yvals = numpy.linspace(y_range[0], y_range[1], num=req.height)
            if cfg.published_CRSs[req.request_crsid]["vertical_coord_first"]:
                nparrays = {
                    band: (("time", yname, xname),
                           numpy.full((len(req.times), len(yvals), len(xvals)),
                                      req.product.nodata_dict[band]))
                    for band in req.bands
                }
            else:
                nparrays = {
                    band: (("time", xname, yname),
                           numpy.full((len(req.times), len(xvals), len(yvals)),
                                      req.product.nodata_dict[band]))
                    for band in req.bands
                }
            data = xarray.Dataset(nparrays,
                                  coords={
                                      "time": req.times,
                                      xname: xvals,
                                      yname: yvals,
                                  }).astype("int16")

            return data

        n_datasets = datasets_in_xarray(datasets)
        if req.product.max_datasets_wcs > 0 and n_datasets > req.product.max_datasets_wcs:
            raise WCS1Exception(
                "This request processes too much data to be served in a reasonable amount of time. "
                "Please reduce the bounds of your request and try again. "
                "(max: %d, this request requires: %d)" %
                (req.product.max_datasets_wcs, n_datasets))

        stacker = DataStacker(req.product,
                              req.geobox,
                              req.times,
                              bands=req.bands)
        output = stacker.data(datasets, skip_corrections=True)
        return output
Example #6
def test_time_search():
    cfg = get_config()
    lyr = list(cfg.product_index.values())[0]
    time = lyr.ranges["times"][-1]
    geom = box(lyr.bboxes["EPSG:4326"]["bottom"],
               lyr.bboxes["EPSG:4326"]["left"], lyr.bboxes["EPSG:4326"]["top"],
               lyr.bboxes["EPSG:4326"]["right"], "EPSG:4326")

    time_rng = local_solar_date_range(MockGeobox(geom), time)
    with cube() as dc:
        sel = mv_search_datasets(dc.index,
                                 MVSelectOpts.COUNT,
                                 times=[time_rng],
                                 layer=lyr)
        assert sel > 0
Example #7
def ping():
    db_ok = False
    with cube() as dc:
        if dc:
            # pylint: disable=protected-access
            with dc.index._db.give_me_a_connection() as conn:
                results = conn.execute("""
                        SELECT *
                        FROM wms.product_ranges
                        LIMIT 1"""
                )
                for r in results:
                    db_ok = True
    if db_ok:
        return (render_template("ping.html", status="Up"), 200, resp_headers({"Content-Type": "text/html"}))
    else:
        return (render_template("ping.html", status="Down"), 500, resp_headers({"Content-Type": "text/html"}))
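ping() returns a (body, status, headers) tuple in the style of a Flask view. Hypothetical wiring only (the route and application object are assumptions, not taken from the source):

from flask import Flask

app = Flask(__name__)
app.add_url_rule("/ping", "ping", ping)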
Example #8
def test_extent_and_spatial():
    cfg = get_config()
    lyr = list(cfg.product_index.values())[0]
    layer_ext_bbx = (
        lyr.bboxes["EPSG:4326"]["left"],
        lyr.bboxes["EPSG:4326"]["bottom"],
        lyr.bboxes["EPSG:4326"]["right"],
        lyr.bboxes["EPSG:4326"]["top"],
    )
    small_bbox = pytest.helpers.enclosed_bbox(layer_ext_bbx)
    layer_ext_geom = box(
        layer_ext_bbx[0],
        layer_ext_bbx[1],
        layer_ext_bbx[2],
        layer_ext_bbx[3],
        "EPSG:4326",
    )
    small_geom = box(small_bbox[0], small_bbox[1], small_bbox[2],
                     small_bbox[3], "EPSG:4326")
    with cube() as dc:
        all_ext = mv_search_datasets(dc.index,
                                     MVSelectOpts.EXTENT,
                                     geom=layer_ext_geom,
                                     layer=lyr)
        small_ext = mv_search_datasets(dc.index,
                                       MVSelectOpts.EXTENT,
                                       geom=small_geom,
                                       layer=lyr)
        assert layer_ext_geom.contains(all_ext)
        assert small_geom.contains(small_ext)
        assert all_ext.contains(small_ext)
        assert small_ext.area < all_ext.area

        all_count = mv_search_datasets(dc.index,
                                       MVSelectOpts.COUNT,
                                       geom=layer_ext_geom,
                                       layer=lyr)
        small_count = mv_search_datasets(dc.index,
                                         MVSelectOpts.COUNT,
                                         geom=small_geom,
                                         layer=lyr)
        assert small_count <= all_count
Example #9
def get_config(refresh=False):
    cfg = OWSConfig(refresh=refresh)
    if not cfg.ready:
        with cube() as dc:
            cfg.make_ready(dc)
    return cfg
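get_config() initialises the OWS configuration lazily, opening a datacube connection only when the configuration is not yet ready; the tests above rely on this. A minimal usage sketch (the name attribute on layer objects is an assumption):

cfg = get_config()
for lyr in cfg.product_index.values():
    print(lyr.name)  # assumed attribute, for illustration only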
Example #10
def feature_info(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetFeatureInfoParameters(args)
    feature_json = {}

    geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j)
    # shrink geobox to point
    # Prepare to extract feature info
    if geobox_is_point(params.geobox):
        geo_point_geobox = params.geobox
    else:
        geo_point_geobox = datacube.utils.geometry.GeoBox.from_geopolygon(
            geo_point, params.geobox.resolution, crs=params.geobox.crs)
    tz = tz_for_geometry(geo_point_geobox.geographic_extent)
    stacker = DataStacker(params.product, geo_point_geobox, params.times)
    # --- Begin code section requiring datacube.
    cfg = get_config()
    with cube() as dc:
        if not dc:
            raise WMSException("Database connectivity failure")
        datasets = stacker.datasets(dc.index, all_time=True, point=geo_point)

        # Taking the data as a single point so our indexes into the data should be 0,0
        h_coord = cfg.published_CRSs[params.crsid]["horizontal_coord"]
        v_coord = cfg.published_CRSs[params.crsid]["vertical_coord"]
        s3_bucket = cfg.s3_bucket
        s3_url = cfg.s3_url
        isel_kwargs = {h_coord: 0, v_coord: 0}
        if any(datasets):
            # Group datasets by time, load only datasets that match the idx_date
            global_info_written = False
            feature_json["data"] = []
            fi_date_index = {}
            ds_at_times = collapse_datasets_to_times(datasets, params.times,
                                                     tz)
            # ds_at_times["time"].attrs["units"] = 'seconds since 1970-01-01 00:00:00'
            if ds_at_times:
                data = stacker.data(
                    ds_at_times,
                    skip_corrections=True,
                    manual_merge=params.product.data_manual_merge,
                    fuse_func=params.product.fuse_func)
                for dt in data.time.values:
                    td = data.sel(time=dt)
                    # Global data that should apply to all dates, but needs some data to extract
                    if not global_info_written:
                        global_info_written = True
                        # Non-geographic coordinate systems need to be projected onto a geographic
                        # coordinate system.  Why not use EPSG:4326?
                        # Extract coordinates in CRS
                        data_x = getattr(td, h_coord)
                        data_y = getattr(td, v_coord)

                        x = data_x[isel_kwargs[h_coord]].item()
                        y = data_y[isel_kwargs[v_coord]].item()
                        pt = geometry.point(x, y, params.crs)

                        # Project to EPSG:4326
                        crs_geo = geometry.CRS("EPSG:4326")
                        ptg = pt.to_crs(crs_geo)

                        # Capture lat/long coordinates
                        feature_json["lon"], feature_json["lat"] = ptg.coords[
                            0]

                    date_info = {}

                    ds = ds_at_times.sel(time=dt).values.tolist()[0]
                    if params.product.multi_product:
                        date_info["source_product"] = "%s (%s)" % (
                            ds.type.name, ds.metadata_doc["platform"]["code"])

                    # Extract data pixel
                    pixel_ds = td.isel(**isel_kwargs)

                    # Get accurate timestamp from dataset
                    if params.product.is_raw_time_res:
                        date_info["time"] = dataset_center_time(ds).strftime(
                            "%Y-%m-%d %H:%M:%S UTC")
                    else:
                        date_info["time"] = ds.time.begin.strftime("%Y-%m-%d")
                    # Collect raw band values for pixel and derived bands from styles
                    date_info["bands"] = _make_band_dict(
                        params.product, pixel_ds, stacker.needed_bands())
                    derived_band_dict = _make_derived_band_dict(
                        pixel_ds, params.product.style_index)
                    if derived_band_dict:
                        date_info["band_derived"] = derived_band_dict
                    # Add any custom-defined fields.
                    for k, f in params.product.feature_info_custom_includes.items(
                    ):
                        date_info[k] = f(date_info["bands"])

                    feature_json["data"].append(date_info)
                    fi_date_index[dt] = feature_json["data"][-1]

            my_flags = 0
            if params.product.pq_names == params.product.product_names:
                pq_datasets = ds_at_times
            else:
                pq_datasets = stacker.datasets(dc.index,
                                               mask=True,
                                               all_time=False,
                                               point=geo_point)

            if pq_datasets:
                if not params.product.pq_ignore_time:
                    pq_datasets = collapse_datasets_to_times(
                        pq_datasets, params.times, tz)
                pq_data = stacker.data(pq_datasets, mask=True)
                # feature_json["flags"] = []
                for dt in pq_data.time.values:
                    pqd = pq_data.sel(time=dt)
                    date_info = fi_date_index.get(dt)
                    if date_info:
                        if "flags" not in date_info:
                            date_info["flags"] = {}
                    else:
                        date_info = {"flags": {}}
                        feature_json["data"].append(date_info)
                    pq_pixel_ds = pqd.isel(**isel_kwargs)
                    # PQ flags
                    flags = pq_pixel_ds[params.product.pq_band].item()
                    if not flags & ~params.product.info_mask:
                        my_flags = my_flags | flags
                    else:
                        continue
                    for mk, mv in params.product.flags_def.items():
                        if mk in params.product.ignore_info_flags:
                            continue
                        bits = mv["bits"]
                        values = mv["values"]
                        if isinstance(bits, int):
                            flag = 1 << bits
                            if my_flags & flag:
                                val = values['1']
                            else:
                                val = values['0']
                            date_info["flags"][mk] = val
                        else:
                            try:
                                for i in bits:
                                    if not isinstance(i, int):
                                        raise TypeError()
                                # bits is a list of ints; match the combined
                                # flag value against the keys in values
                                for key, desc in values.items():
                                    if ((isinstance(key, str) and key == str(my_flags)) or
                                            (isinstance(key, int) and key == my_flags)):
                                        date_info["flags"][mk] = desc
                                        break
                            except TypeError:
                                pass
            feature_json["data_available_for_dates"] = []
            for d in datasets.coords["time"].values:
                dt_datasets = datasets.sel(time=d)
                dt = datetime.utcfromtimestamp(d.astype(int) * 1e-9)
                if params.product.is_raw_time_res:
                    dt = solar_date(dt, tz)
                pt_native = None
                for ds in dt_datasets.values.item():
                    if pt_native is None:
                        pt_native = geo_point.to_crs(ds.crs)
                    elif pt_native.crs != ds.crs:
                        pt_native = geo_point.to_crs(ds.crs)
                    if ds.extent and ds.extent.contains(pt_native):
                        feature_json["data_available_for_dates"].append(
                            dt.strftime("%Y-%m-%d"))
                        break
            if ds_at_times:
                feature_json["data_links"] = sorted(
                    get_s3_browser_uris(ds_at_times, pt, s3_url, s3_bucket))
            else:
                feature_json["data_links"] = []
            if params.product.feature_info_include_utc_dates:
                unsorted_dates = []
                for tds in datasets:
                    for ds in tds.values.item():
                        if params.product.time_resolution.is_raw_time_res:
                            unsorted_dates.append(
                                ds.center_time.strftime("%Y-%m-%d"))
                        else:
                            unsorted_dates.append(
                                ds.time.begin.strftime("%Y-%m-%d"))
                feature_json["data_available_for_utc_dates"] = sorted(
                    d.center_time.strftime("%Y-%m-%d") for d in datasets)
    # --- End code section requiring datacube.

    result = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": feature_json
        }]
    }
    return json_response(result, cfg)
Example #11
def get_map(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetMapParameters(args)
    qprof = QueryProfiler(params.ows_stats)
    n_dates = len(params.times)
    if n_dates == 1:
        mdh = None
    else:
        mdh = params.style.get_multi_date_handler(n_dates)
        if mdh is None:
            raise WMSException(
                "Style %s does not support GetMap requests with %d dates" %
                (params.style.name, n_dates),
                WMSException.INVALID_DIMENSION_VALUE,
                locator="Time parameter")
    qprof["n_dates"] = n_dates
    with cube() as dc:
        if not dc:
            raise WMSException("Database connectivity failure")
        # Tiling.
        stacker = DataStacker(params.product,
                              params.geobox,
                              params.times,
                              params.resampling,
                              style=params.style)
        zoomed_out = params.zf < params.product.min_zoom
        too_many_datasets = False
        if not zoomed_out:
            qprof.start_event("count-datasets")
            n_datasets = stacker.datasets(dc.index, mode=MVSelectOpts.COUNT)
            qprof.end_event("count-datasets")
            qprof["n_datasets"] = n_datasets
            too_many_datasets = (params.product.max_datasets_wms > 0 and
                                 n_datasets > params.product.max_datasets_wms)
        if too_many_datasets or zoomed_out:
            qprof["too_many_datasets"] = too_many_datasets
            qprof["zoomed_out"] = zoomed_out
            qprof.start_event("extent-in-query")
            extent = stacker.datasets(dc.index, mode=MVSelectOpts.EXTENT)
            qprof.end_event("extent-in-query")
            if extent is None:
                qprof["write_action"] = "No extent: Write Empty"
                qprof.start_event("write")
                body = _write_empty(params.geobox)
                qprof.end_event("write")
            else:
                qprof["write_action"] = "Polygon"
                qprof.start_event("write")
                body = _write_polygon(params.geobox, extent,
                                      params.product.zoom_fill, params.product)
                qprof.end_event("write")
        elif n_datasets == 0:
            qprof["write_action"] = "No datsets: Write Empty"
            qprof.start_event("write")
            body = _write_empty(params.geobox)
            qprof.end_event("write")
        else:
            qprof.start_event("fetch-datasets")
            datasets = stacker.datasets(dc.index)
            qprof.end_event("fetch-datasets")
            _LOG.debug("load start %s %s",
                       datetime.now().time(), args["requestid"])
            qprof.start_event("load-data")
            data = stacker.data(datasets,
                                manual_merge=params.product.data_manual_merge,
                                fuse_func=params.product.fuse_func)
            qprof.end_event("load-data")
            _LOG.debug("load stop %s %s",
                       datetime.now().time(), args["requestid"])
            if params.style.masks:
                if params.product.pq_name == params.product.name:
                    qprof.start_event("build-pq-xarray")
                    pq_band_data = (
                        data[params.product.pq_band].dims,
                        data[params.product.pq_band].astype("uint16"))
                    pq_data = xarray.Dataset(
                        {params.product.pq_band: pq_band_data},
                        coords=data[params.product.pq_band].coords)
                    flag_def = data[params.product.pq_band].flags_definition
                    pq_data[params.product.
                            pq_band].attrs["flags_definition"] = flag_def
                    qprof.end_event("build-pq-xarray")
                else:
                    qprof.start_event("load-pq-xarray")
                    n_pq_datasets = stacker.datasets(
                        dc.index,
                        mask=True,
                        all_time=params.product.pq_ignore_time,
                        mode=MVSelectOpts.COUNT)
                    if n_pq_datasets > 0:
                        pq_datasets = stacker.datasets(
                            dc.index,
                            mask=True,
                            all_time=params.product.pq_ignore_time,
                            mode=MVSelectOpts.DATASETS)
                        pq_data = stacker.data(
                            pq_datasets,
                            mask=True,
                            manual_merge=params.product.pq_manual_merge,
                            fuse_func=params.product.pq_fuse_func)
                    else:
                        pq_data = None
                    qprof.end_event("load-pq-xarray")
                    qprof["n_pq_datasets"] = n_pq_datasets
            else:
                pq_data = None

            qprof.start_event("build-masks")
            td_masks = []
            for npdt in data.time.values:
                td = data.sel(time=npdt)
                td_ext_mask = None
                for band in params.style.needed_bands:
                    if params.product.pq_band != band:
                        if params.product.data_manual_merge:
                            if td_ext_mask is None:
                                td_ext_mask = ~numpy.isnan(td[band])
                            else:
                                td_ext_mask &= ~numpy.isnan(td[band])
                        else:
                            for f in params.product.extent_mask_func:
                                if td_ext_mask is None:
                                    td_ext_mask = f(td, band)
                                else:
                                    td_ext_mask &= f(td, band)
                if params.product.data_manual_merge:
                    td_ext_mask = xarray.DataArray(td_ext_mask)
                td_masks.append(td_ext_mask)
            extent_mask = xarray.concat(td_masks, dim=data.time)
            qprof.end_event("build-masks")

            if not data or (params.style.masks and not pq_data):
                qprof["write_action"] = "No Data: Write Empty"
                body = _write_empty(params.geobox)
            else:
                qprof["write_action"] = "Write Data"
                body = _write_png(data, pq_data, params.style, extent_mask,
                                  params.geobox, qprof)

    if params.ows_stats:
        return json_response(qprof.profile())
    else:
        return png_response(body)
Example #12
def get_coverage_data(request):
    #pylint: disable=too-many-locals, protected-access

    cfg = get_config()

    layer_name = request.coverage_id
    layer = cfg.product_index.get(layer_name)
    if not layer or not layer.wcs:
        raise WCS2Exception("Invalid coverage: %s" % layer_name,
                            WCS2Exception.NO_SUCH_COVERAGE,
                            locator="COVERAGE parameter",
                            valid_keys=list(cfg.product_index))

    with cube() as dc:
        if not dc:
            raise WCS2Exception("Database connectivity failure")
        #
        # CRS handling
        #

        native_crs = layer.native_CRS
        subsetting_crs = uniform_crs(cfg, request.subsetting_crs or native_crs)
        output_crs = uniform_crs(cfg, request.output_crs or subsetting_crs)

        if subsetting_crs not in cfg.published_CRSs:
            raise WCS2Exception("Invalid subsettingCrs: %s" % subsetting_crs,
                                WCS2Exception.SUBSETTING_CRS_NOT_SUPPORTED,
                                locator=subsetting_crs,
                                valid_keys=list(cfg.published_CRSs))

        output_crs = uniform_crs(
            cfg, request.output_crs or subsetting_crs or native_crs)

        if output_crs not in cfg.published_CRSs:
            raise WCS2Exception("Invalid outputCrs: %s" % output_crs,
                                WCS2Exception.OUTPUT_CRS_NOT_SUPPORTED,
                                locator=output_crs,
                                valid_keys=list(cfg.published_CRSs))

        #
        # Subsetting/Scaling
        #

        scaler = WCSScaler(layer, subsetting_crs)
        times = layer.ranges["times"]

        subsets = request.subsets

        if len(subsets) != len(
                set(subset.dimension.lower() for subset in subsets)):
            dimensions = [subset.dimension.lower() for subset in subsets]
            duplicate_dimensions = [
                item
                for item, count in collections.Counter(dimensions).items()
                if count > 1
            ]

            raise WCS2Exception("Duplicate dimension%s: %s" %
                                ('s' if len(duplicate_dimensions) > 1 else '',
                                 ', '.join(duplicate_dimensions)),
                                WCS2Exception.INVALID_SUBSETTING,
                                locator=','.join(duplicate_dimensions))

        for subset in subsets:
            dimension = subset.dimension.lower()
            if dimension == 'time':
                if isinstance(subset, Trim):
                    low = parse(
                        subset.low).date() if subset.low is not None else None
                    high = parse(subset.high).date(
                    ) if subset.high is not None else None
                    if low is not None:
                        times = [time for time in times if time >= low]
                    if high is not None:
                        times = [time for time in times if time <= high]
                elif isinstance(subset, Slice):
                    point = parse(subset.point).date()
                    times = [point]

            else:
                try:
                    if isinstance(subset, Trim):
                        scaler.trim(dimension, subset.low, subset.high)
                    elif isinstance(subset, Slice):
                        scaler.slice(dimension, subset.point)
                except WCSScalerUnknownDimension:
                    raise WCS2Exception('Invalid subsetting axis %s' %
                                        subset.dimension,
                                        WCS2Exception.INVALID_AXIS_LABEL,
                                        locator=subset.dimension)

        #
        # Transform spatial extent to native CRS.
        #
        scaler.to_crs(output_crs)

        #
        # Scaling
        #

        scales = request.scales
        if len(scales) != len(set(subset.axis.lower() for subset in scales)):
            axes = [subset.axis.lower() for subset in scales]
            duplicate_axes = [
                item for item, count in collections.Counter(axes).items()
                if count > 1
            ]
            raise WCS2Exception('Duplicate scales for ax%ss: %s' %
                                ('i' if len(duplicate_axes) == 1 else 'e',
                                 ', '.join(duplicate_axes)),
                                WCS2Exception.INVALID_SCALE_FACTOR,
                                locator=','.join(duplicate_axes))

        for scale in scales:
            axis = scale.axis.lower()

            if axis in ('time', 'k'):
                raise WCS2Exception('Cannot scale axis %s' % scale.axis,
                                    WCS2Exception.INVALID_SCALE_FACTOR,
                                    locator=scale.axis)
            else:
                if isinstance(scale, ScaleAxis):
                    scaler.scale_axis(axis, scale.factor)
                elif isinstance(scale, ScaleSize):
                    scaler.scale_size(axis, scale.size)
                elif isinstance(scale, ScaleExtent):
                    scaler.scale_extent(axis, scale.low, scale.high)

        #
        # Rangesubset
        #

        band_labels = layer.band_idx.band_labels()
        if request.range_subset:
            bands = []
            for range_subset in request.range_subset:
                if isinstance(range_subset, str):
                    if range_subset not in band_labels:
                        raise WCS2Exception('No such field %s' % range_subset,
                                            WCS2Exception.NO_SUCH_FIELD,
                                            locator=range_subset,
                                            valid_keys=band_labels)
                    bands.append(range_subset)
                else:
                    if range_subset.start not in band_labels:
                        raise WCS2Exception(
                            'No such field %s' % range_subset.start,
                            WCS2Exception.ILLEGAL_FIELD_SEQUENCE,
                            locator=range_subset.start,
                            valid_keys=band_labels)
                    if range_subset.end not in band_labels:
                        raise WCS2Exception(
                            'No such field %s' % range_subset.end,
                            WCS2Exception.ILLEGAL_FIELD_SEQUENCE,
                            locator=range_subset.end,
                            valid_keys=band_labels)

                    start = band_labels.index(range_subset.start)
                    end = band_labels.index(range_subset.end)
                    bands.extend(
                        band_labels[start:(end + 1) if end > start else (end -
                                                                         1)])
        else:
            bands = layer.wcs_default_bands  # TODO: standard says differently

        #
        # Format handling
        #

        if not request.format:
            fmt = cfg.wcs_formats_by_name[layer.native_format]
        else:
            try:
                fmt = cfg.wcs_formats_by_mime[request.format]
            except KeyError:
                raise WCS2Exception("Unsupported format: %s" % request.format,
                                    WCS2Exception.INVALID_PARAMETER_VALUE,
                                    locator="FORMAT",
                                    valid_keys=list(cfg.wcs_formats_by_mime))

        if len(times) > 1 and not fmt.multi_time:
            raise WCS2Exception(
                "Format does not support multi-time datasets - "
                "either constrain the time dimension or choose a different format",
                WCS2Exception.INVALID_SUBSETTING,
                locator="FORMAT or SUBSET")
        affine = scaler.affine()
        geobox = geometry.GeoBox(scaler.size.x, scaler.size.y, affine,
                                 cfg.crs(output_crs))

        stacker = DataStacker(layer, geobox, times, bands=bands)
        n_datasets = stacker.datasets(dc.index, mode=MVSelectOpts.COUNT)

        if layer.max_datasets_wcs > 0 and n_datasets > layer.max_datasets_wcs:
            raise WCS2Exception(
                "This request processes too much data to be served in a reasonable amount of time. "
                "Please reduce the bounds of your request and try again. "
                "(max: %d, this request requires: %d)" %
                (layer.max_datasets_wcs, n_datasets))
        elif n_datasets == 0:
            raise WCS2Exception(
                "The requested spatio-temporal subsets return no data.",
                WCS2Exception.INVALID_SUBSETTING,
                http_response=404)

        datasets = stacker.datasets(dc.index)
        if fmt.multi_time and len(times) > 1:
            # Group by solar day
            group_by = datacube.api.query.query_group_by(time=times,
                                                         group_by='solar_day')
            datasets = dc.group_datasets(datasets, group_by)

        output = stacker.data(datasets, skip_corrections=True)

        # Clean extent flag band from output
        for k, v in output.data_vars.items():
            if k not in bands:
                output = output.drop_vars([k])

    #
    # TODO: configurable
    #
    if fmt.mime == 'image/geotiff':
        output = fmt.renderer(request.version)(request, output, output_crs,
                                               layer, scaler.size.x,
                                               scaler.size.y, affine)

    else:
        output = fmt.renderer(request.version)(request, output, output_crs)

    headers = {
        "Content-Type":
        fmt.mime,
        'content-disposition':
        f'attachment; filename={request.coverage_id}.{fmt.extension}',
    }
    headers.update(layer.wcs_cache_rules.cache_headers(n_datasets))
    return output, headers
Example #13
def get_map(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetMapParameters(args)
    n_dates = len(params.times)
    if n_dates == 1:
        mdh = None
    else:
        mdh = params.style.get_multi_date_handler(n_dates)
        if mdh is None:
            raise WMSException(
                "Style %s does not support GetMap requests with %d dates" %
                (params.style.name, n_dates),
                WMSException.INVALID_DIMENSION_VALUE,
                locator="Time parameter")

    with cube() as dc:
        if not dc:
            raise WMSException("Database connectivity failure")
        # Tiling.
        stacker = DataStacker(params.product,
                              params.geobox,
                              params.times,
                              params.resampling,
                              style=params.style)
        datasets = stacker.datasets(dc.index)
        n_datasets = datasets_in_xarray(datasets)
        zoomed_out = params.zf < params.product.min_zoom
        too_many_datasets = (params.product.max_datasets_wms > 0
                             and n_datasets > params.product.max_datasets_wms)
        if n_datasets == 0:
            body = _write_empty(params.geobox)
        elif too_many_datasets:
            body = _write_polygon(params.geobox, params.geobox.extent,
                                  params.product.zoom_fill)
        elif zoomed_out:
            # Zoomed out too far to properly render data.
            # Construct a polygon which is the union of the extents of the matching datasets.
            extent = None
            extent_crs = None
            for dt in datasets.time.values:
                _dt = str(dt).split('T')[0]
                tds = datasets.sel(time=_dt)
                for ds in tds.values.item():
                    if extent:
                        new_extent = bbox_to_geom(ds.extent.boundingbox,
                                                  ds.extent.crs)
                        if new_extent.crs != extent_crs:
                            new_extent = new_extent.to_crs(extent_crs)
                        extent = extent.union(new_extent)
                    else:
                        extent = bbox_to_geom(ds.extent.boundingbox,
                                              ds.extent.crs)
                        extent_crs = extent.crs
            extent = extent.to_crs(params.crs)
            body = _write_polygon(params.geobox, extent,
                                  params.product.zoom_fill)
        else:
            _LOG.debug("load start %s %s",
                       datetime.now().time(), args["requestid"])
            data = stacker.data(datasets,
                                manual_merge=params.product.data_manual_merge,
                                fuse_func=params.product.fuse_func)
            _LOG.debug("load stop %s %s",
                       datetime.now().time(), args["requestid"])
            if params.style.masks:
                if params.product.pq_name == params.product.name:
                    pq_band_data = (
                        data[params.product.pq_band].dims,
                        data[params.product.pq_band].astype("uint16"))
                    pq_data = xarray.Dataset(
                        {params.product.pq_band: pq_band_data},
                        coords=data[params.product.pq_band].coords)
                    flag_def = data[params.product.pq_band].flags_definition
                    pq_data[params.product.
                            pq_band].attrs["flags_definition"] = flag_def
                else:
                    pq_datasets = stacker.datasets(
                        dc.index,
                        mask=True,
                        all_time=params.product.pq_ignore_time)
                    n_pq_datasets = datasets_in_xarray(pq_datasets)
                    if n_pq_datasets > 0:
                        pq_data = stacker.data(
                            pq_datasets,
                            mask=True,
                            manual_merge=params.product.pq_manual_merge,
                            fuse_func=params.product.pq_fuse_func)
                    else:
                        pq_data = None
            else:
                pq_data = None

            extent_mask = None
            if not params.product.data_manual_merge:
                td_masks = []
                for npdt in data.time.values:
                    td = data.sel(time=npdt)
                    td_ext_mask = None
                    for band in params.style.needed_bands:
                        for f in params.product.extent_mask_func:
                            if td_ext_mask is None:
                                td_ext_mask = f(td, band)
                            else:
                                td_ext_mask &= f(td, band)
                    td_masks.append(td_ext_mask)
                extent_mask = xarray.concat(td_masks, dim=data.time)
                #    extent_mask.add_time(td.time, ext_mask)

            if not data or (params.style.masks and not pq_data):
                body = _write_empty(params.geobox)
            else:
                body = _write_png(data, pq_data, params.style, extent_mask,
                                  params.geobox)

    cfg = get_config()
    return body, 200, cfg.response_headers({"Content-Type": "image/png"})
Example #14
def test_full_layer():
    cfg = get_config()
    lyr = list(cfg.product_index.values())[0]
    with cube() as dc:
        sel = mv_search_datasets(dc.index, MVSelectOpts.COUNT, layer=lyr)
        assert sel > 0
Example #15
def get_map(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetMapParameters(args)
    qprof = QueryProfiler(params.ows_stats)
    n_dates = len(params.times)
    if n_dates == 1:
        mdh = None
    else:
        mdh = params.style.get_multi_date_handler(n_dates)
        if mdh is None:
            raise WMSException(
                "Style %s does not support GetMap requests with %d dates" %
                (params.style.name, n_dates),
                WMSException.INVALID_DIMENSION_VALUE,
                locator="Time parameter")
    qprof["n_dates"] = n_dates
    with cube() as dc:
        if not dc:
            raise WMSException("Database connectivity failure")
        # Tiling.
        stacker = DataStacker(params.product,
                              params.geobox,
                              params.times,
                              params.resampling,
                              style=params.style)
        zoomed_out = params.zf < params.product.min_zoom
        qprof["zoom_factor"] = params.zf
        qprof.start_event("count-datasets")
        n_datasets = stacker.datasets(dc.index, mode=MVSelectOpts.COUNT)
        qprof.end_event("count-datasets")
        qprof["n_datasets"] = n_datasets
        too_many_datasets = (params.product.max_datasets_wms > 0
                             and n_datasets > params.product.max_datasets_wms)
        if qprof.active:
            qprof["datasets"] = stacker.datasets(dc.index,
                                                 mode=MVSelectOpts.IDS)
        if too_many_datasets or zoomed_out:
            stacker.resource_limited = True
            qprof["too_many_datasets"] = too_many_datasets
            qprof["zoomed_out"] = zoomed_out

        if stacker.resource_limited and not params.product.low_res_product_names:
            qprof.start_event("extent-in-query")
            extent = stacker.datasets(dc.index, mode=MVSelectOpts.EXTENT)
            qprof.end_event("extent-in-query")
            if extent is None:
                qprof["write_action"] = "No extent: Write Empty"
                qprof.start_event("write")
                body = _write_empty(params.geobox)
                qprof.end_event("write")
            else:
                qprof["write_action"] = "Polygon"
                qprof.start_event("write")
                body = _write_polygon(params.geobox, extent,
                                      params.product.zoom_fill, params.product)
                qprof.end_event("write")
        elif n_datasets == 0:
            qprof["write_action"] = "No datsets: Write Empty"
            qprof.start_event("write")
            body = _write_empty(params.geobox)
            qprof.end_event("write")
        else:
            if stacker.resource_limited:
                qprof.start_event("count-summary-datasets")
                qprof["n_summary_datasets"] = stacker.datasets(
                    dc.index, mode=MVSelectOpts.COUNT)
                qprof.end_event("count-summary-datasets")
            qprof.start_event("fetch-datasets")
            datasets = stacker.datasets(dc.index)
            for flagband, dss in datasets.items():
                if not dss.any():
                    _LOG.warning("Flag band %s returned no data",
                                 str(flagband))
            qprof.end_event("fetch-datasets")
            _LOG.debug("load start %s %s",
                       datetime.now().time(), args["requestid"])
            qprof.start_event("load-data")
            data = stacker.data(datasets)
            qprof.end_event("load-data")
            _LOG.debug("load stop %s %s",
                       datetime.now().time(), args["requestid"])
            qprof.start_event("build-masks")
            td_masks = []
            for npdt in data.time.values:
                td = data.sel(time=npdt)
                td_ext_mask = None
                for band in params.style.needed_bands:
                    if band not in params.style.flag_bands:
                        if params.product.data_manual_merge:
                            if td_ext_mask is None:
                                td_ext_mask = ~numpy.isnan(td[band])
                            else:
                                td_ext_mask &= ~numpy.isnan(td[band])
                        else:
                            for f in params.product.extent_mask_func:
                                if td_ext_mask is None:
                                    td_ext_mask = f(td, band)
                                else:
                                    td_ext_mask &= f(td, band)
                if params.product.data_manual_merge:
                    td_ext_mask = xarray.DataArray(td_ext_mask)
                td_masks.append(td_ext_mask)
            extent_mask = xarray.concat(td_masks, dim=data.time)
            qprof.end_event("build-masks")

            if not data:
                qprof["write_action"] = "No Data: Write Empty"
                body = _write_empty(params.geobox)
            else:
                qprof["write_action"] = "Write Data"
                body = _write_png(data, params.style, extent_mask,
                                  params.geobox, qprof)

    if params.ows_stats:
        return json_response(qprof.profile())
    else:
        return png_response(
            body,
            extra_headers=params.product.wms_cache_rules.cache_headers(
                n_datasets))