def get_coverage_data(req):
    # pylint: disable=too-many-locals, protected-access
    with cube() as dc:
        if not dc:
            raise WCS1Exception("Database connectivity failure")
        stacker = DataStacker(req.product, req.geobox, req.times, bands=req.bands)
        n_datasets = stacker.datasets(dc.index, mode=MVSelectOpts.COUNT)

        if n_datasets == 0:
            # Return an empty coverage file with full metadata?
            cfg = get_config()
            x_range = (req.minx, req.maxx)
            y_range = (req.miny, req.maxy)
            xname = cfg.published_CRSs[req.request_crsid]["horizontal_coord"]
            yname = cfg.published_CRSs[req.request_crsid]["vertical_coord"]
            xvals = numpy.linspace(x_range[0], x_range[1], num=req.width)
            yvals = numpy.linspace(y_range[0], y_range[1], num=req.height)
            if cfg.published_CRSs[req.request_crsid]["vertical_coord_first"]:
                nparrays = {
                    band: (("time", yname, xname),
                           numpy.full((len(req.times), len(yvals), len(xvals)),
                                      req.product.band_idx.nodata_val(band)))
                    for band in req.bands
                }
            else:
                nparrays = {
                    band: (("time", xname, yname),
                           numpy.full((len(req.times), len(xvals), len(yvals)),
                                      req.product.band_idx.nodata_val(band)))
                    for band in req.bands
                }
            data = xarray.Dataset(
                nparrays,
                coords={
                    "time": req.times,
                    xname: xvals,
                    yname: yvals,
                }
            ).astype("int16")
            return n_datasets, data

        if req.product.max_datasets_wcs > 0 and n_datasets > req.product.max_datasets_wcs:
            raise WCS1Exception(
                "This request processes too much data to be served in a reasonable amount of time. "
                "Please reduce the bounds of your request and try again. "
                "(max: %d, this request requires: %d)"
                % (req.product.max_datasets_wcs, n_datasets))

        datasets = stacker.datasets(index=dc.index)
        # The stacker built above already carries the product, geobox, times
        # and bands, so it can be reused directly for the data read.
        output = stacker.data(datasets, skip_corrections=True)
        return n_datasets, output
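# The n_datasets == 0 branch above hand-builds an all-nodata coverage so the
# response still carries full coordinate metadata. A minimal, self-contained
# sketch of that xarray pattern (the band names, extents, sizes and nodata
# value below are illustrative assumptions, not values from the OWS config):
import numpy
import xarray

def empty_coverage_sketch():
    times = ["2021-01-01", "2021-01-02"]
    xvals = numpy.linspace(140.0, 141.0, num=4)
    yvals = numpy.linspace(-36.0, -35.0, num=3)
    nodata = -999
    # One (time, y, x) array of nodata per band, keyed by band name.
    nparrays = {
        band: (("time", "y", "x"),
               numpy.full((len(times), len(yvals), len(xvals)), nodata))
        for band in ("red", "nir")
    }
    return xarray.Dataset(
        nparrays,
        coords={"time": times, "x": xvals, "y": yvals},
    ).astype("int16")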
def get_coverage_data(request):
    # pylint: disable=too-many-locals, protected-access
    cfg = get_config()
    layer_name = request.coverage_id
    layer = cfg.product_index.get(layer_name)
    if not layer or not layer.wcs:
        raise WCS2Exception("Invalid coverage: %s" % layer_name,
                            WCS2Exception.NO_SUCH_COVERAGE,
                            locator="COVERAGE parameter",
                            valid_keys=list(cfg.product_index))

    with cube() as dc:
        if not dc:
            raise WCS2Exception("Database connectivity failure")

        #
        # CRS handling
        #
        native_crs = layer.native_CRS
        subsetting_crs = uniform_crs(cfg, request.subsetting_crs or native_crs)
        if subsetting_crs not in cfg.published_CRSs:
            raise WCS2Exception("Invalid subsettingCrs: %s" % subsetting_crs,
                                WCS2Exception.SUBSETTING_CRS_NOT_SUPPORTED,
                                locator=subsetting_crs,
                                valid_keys=list(cfg.published_CRSs))

        output_crs = uniform_crs(cfg, request.output_crs or subsetting_crs or native_crs)
        if output_crs not in cfg.published_CRSs:
            raise WCS2Exception("Invalid outputCrs: %s" % output_crs,
                                WCS2Exception.OUTPUT_CRS_NOT_SUPPORTED,
                                locator=output_crs,
                                valid_keys=list(cfg.published_CRSs))

        #
        # Subsetting/Scaling
        #
        scaler = WCSScaler(layer, subsetting_crs)
        times = layer.ranges["times"]

        subsets = request.subsets
        if len(subsets) != len(set(subset.dimension.lower() for subset in subsets)):
            dimensions = [subset.dimension.lower() for subset in subsets]
            duplicate_dimensions = [
                item
                for item, count in collections.Counter(dimensions).items()
                if count > 1
            ]
            raise WCS2Exception("Duplicate dimension%s: %s" % (
                                    's' if len(duplicate_dimensions) > 1 else '',
                                    ', '.join(duplicate_dimensions)),
                                WCS2Exception.INVALID_SUBSETTING,
                                locator=','.join(duplicate_dimensions))

        for subset in subsets:
            dimension = subset.dimension.lower()
            if dimension == 'time':
                if isinstance(subset, Trim):
                    low = parse(subset.low).date() if subset.low is not None else None
                    high = parse(subset.high).date() if subset.high is not None else None
                    if low is not None:
                        times = [time for time in times if time >= low]
                    if high is not None:
                        times = [time for time in times if time <= high]
                elif isinstance(subset, Slice):
                    point = parse(subset.point).date()
                    times = [point]
            else:
                try:
                    if isinstance(subset, Trim):
                        scaler.trim(dimension, subset.low, subset.high)
                    elif isinstance(subset, Slice):
                        scaler.slice(dimension, subset.point)
                except WCSScalerUnknownDimension:
                    raise WCS2Exception('Invalid subsetting axis %s' % subset.dimension,
                                        WCS2Exception.INVALID_AXIS_LABEL,
                                        locator=subset.dimension)

        #
        # Transform spatial extent to the output CRS.
        #
        scaler.to_crs(output_crs)

        #
        # Scaling
        #
        scales = request.scales
        if len(scales) != len(set(scale.axis.lower() for scale in scales)):
            axes = [scale.axis.lower() for scale in scales]
            duplicate_axes = [
                item
                for item, count in collections.Counter(axes).items()
                if count > 1
            ]
            raise WCS2Exception('Duplicate scales for ax%ss: %s' % (
                                    'i' if len(duplicate_axes) == 1 else 'e',
                                    ', '.join(duplicate_axes)),
                                WCS2Exception.INVALID_SCALE_FACTOR,
                                locator=','.join(duplicate_axes))

        for scale in scales:
            axis = scale.axis.lower()
            if axis in ('time', 'k'):
                raise WCS2Exception('Cannot scale axis %s' % scale.axis,
                                    WCS2Exception.INVALID_SCALE_FACTOR,
                                    locator=scale.axis)
            if isinstance(scale, ScaleAxis):
                scaler.scale_axis(axis, scale.factor)
            elif isinstance(scale, ScaleSize):
                scaler.scale_size(axis, scale.size)
            elif isinstance(scale, ScaleExtent):
                scaler.scale_extent(axis, scale.low, scale.high)

        #
        # Rangesubset
        #
        band_labels = layer.band_idx.band_labels()
        if request.range_subset:
            bands = []
            for range_subset in request.range_subset:
                if isinstance(range_subset, str):
                    if range_subset not in band_labels:
                        raise WCS2Exception('No such field %s' % range_subset,
                                            WCS2Exception.NO_SUCH_FIELD,
                                            locator=range_subset,
                                            valid_keys=band_labels)
                    bands.append(range_subset)
                else:
                    if range_subset.start not in band_labels:
                        raise WCS2Exception('No such field %s' % range_subset.start,
                                            WCS2Exception.ILLEGAL_FIELD_SEQUENCE,
                                            locator=range_subset.start,
                                            valid_keys=band_labels)
                    if range_subset.end not in band_labels:
                        raise WCS2Exception('No such field %s' % range_subset.end,
                                            WCS2Exception.ILLEGAL_FIELD_SEQUENCE,
                                            locator=range_subset.end,
                                            valid_keys=band_labels)
                    start = band_labels.index(range_subset.start)
                    end = band_labels.index(range_subset.end)
                    bands.extend(band_labels[start:(end + 1) if end > start else (end - 1)])
        else:
            bands = layer.wcs_default_bands  # TODO: standard says differently

        #
        # Format handling
        #
        if not request.format:
            fmt = cfg.wcs_formats_by_name[layer.native_format]
        else:
            try:
                fmt = cfg.wcs_formats_by_mime[request.format]
            except KeyError:
                raise WCS2Exception("Unsupported format: %s" % request.format,
                                    WCS2Exception.INVALID_PARAMETER_VALUE,
                                    locator="FORMAT",
                                    valid_keys=list(cfg.wcs_formats_by_mime))

        if len(times) > 1 and not fmt.multi_time:
            raise WCS2Exception(
                "Format does not support multi-time datasets - "
                "either constrain the time dimension or choose a different format",
                WCS2Exception.INVALID_SUBSETTING,
                locator="FORMAT or SUBSET")

        affine = scaler.affine()
        geobox = geometry.GeoBox(scaler.size.x, scaler.size.y,
                                 affine, cfg.crs(output_crs))

        stacker = DataStacker(layer, geobox, times, bands=bands)
        n_datasets = stacker.datasets(dc.index, mode=MVSelectOpts.COUNT)

        if layer.max_datasets_wcs > 0 and n_datasets > layer.max_datasets_wcs:
            raise WCS2Exception(
                "This request processes too much data to be served in a reasonable amount of time. "
                "Please reduce the bounds of your request and try again. "
                "(max: %d, this request requires: %d)"
                % (layer.max_datasets_wcs, n_datasets))
        elif n_datasets == 0:
            raise WCS2Exception(
                "The requested spatio-temporal subsets return no data.",
                WCS2Exception.INVALID_SUBSETTING,
                http_response=404)

        datasets = stacker.datasets(dc.index)
        if fmt.multi_time and len(times) > 1:
            # Group by solar day
            group_by = datacube.api.query.query_group_by(time=times, group_by='solar_day')
            datasets = dc.group_datasets(datasets, group_by)

        output = stacker.data(datasets, skip_corrections=True)

        # Clean extent flag band from output
        for band in list(output.data_vars):
            if band not in bands:
                output = output.drop_vars([band])

        #
        # TODO: configurable
        #
        if fmt.mime == 'image/geotiff':
            output = fmt.renderer(request.version)(request, output, output_crs,
                                                   layer, scaler.size.x, scaler.size.y,
                                                   affine)
        else:
            output = fmt.renderer(request.version)(request, output, output_crs)

        headers = {
            "Content-Type": fmt.mime,
            "Content-Disposition": f"attachment; filename={request.coverage_id}.{fmt.extension}",
        }
        headers.update(layer.wcs_cache_rules.cache_headers(n_datasets))
        return output, headers
def get_coverage_data(req):
    # pylint: disable=too-many-locals, protected-access
    dc = get_cube()
    datasets = []
    for t in req.times:
        # If t was passed to the datasets method instead of the stacker
        # constructor, we could reuse a single stacker.
        stacker = DataStacker(req.product, req.geobox, t, bands=req.bands)
        t_datasets = stacker.datasets(dc.index)
        if not t_datasets:
            # No matching data for this date
            continue
        datasets.extend(t_datasets)

    if not datasets:
        # TODO: Return an empty coverage file with full metadata?
        # Note: `stacker` is whichever stacker the loop above built last,
        # so this branch assumes req.times is non-empty.
        extents = dc.load(dask_chunks={}, product=req.product.product.name,
                          geopolygon=req.geobox.extent, time=stacker._time)
        cfg = get_config()
        x_range = (req.minx, req.maxx)
        y_range = (req.miny, req.maxy)
        xname = cfg.published_CRSs[req.request_crsid]["horizontal_coord"]
        yname = cfg.published_CRSs[req.request_crsid]["vertical_coord"]
        if xname in extents:
            xvals = extents[xname]
        else:
            xvals = numpy.linspace(x_range[0], x_range[1], num=req.width)
        if yname in extents:
            yvals = extents[yname]
        else:
            yvals = numpy.linspace(y_range[0], y_range[1], num=req.height)
        if cfg.published_CRSs[req.request_crsid]["vertical_coord_first"]:
            nparrays = {
                band: ((yname, xname),
                       numpy.full((len(yvals), len(xvals)),
                                  req.product.nodata_dict[band]))
                for band in req.bands
            }
        else:
            nparrays = {
                band: ((xname, yname),
                       numpy.full((len(xvals), len(yvals)),
                                  req.product.nodata_dict[band]))
                for band in req.bands
            }
        data = xarray.Dataset(
            nparrays,
            coords={
                xname: xvals,
                yname: yvals,
            }
        ).astype("int16")
        release_cube(dc)
        return data

    if req.product.max_datasets_wcs > 0 and len(datasets) > req.product.max_datasets_wcs:
        raise WCS1Exception(
            "This request processes too much data to be served in a reasonable amount of time. "
            "Please reduce the bounds of your request and try again. "
            "(max: %d, this request requires: %d)"
            % (req.product.max_datasets_wcs, len(datasets)))

    if req.format["multi-time"] and len(req.times) > 1:
        # Group by solar day
        group_by = datacube.api.query.query_group_by(time=req.times, group_by='solar_day')
        datasets = dc.group_datasets(datasets, group_by)

    stacker = DataStacker(req.product, req.geobox, req.times[0], bands=req.bands)
    output = stacker.data(datasets, skip_corrections=True)
    release_cube(dc)
    return output
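# Lifecycle note: the two newer versions above acquire the datacube handle
# with a `with cube() as dc:` block, so it is released even when a
# WCS1Exception/WCS2Exception escapes. This oldest version calls
# get_cube()/release_cube() manually and can leak the handle if an exception
# is raised mid-request. A sketch of closing that gap with try/finally
# (get_cube/release_cube as used above; the wrapper itself is hypothetical):
def run_with_cube(fn, *args, **kwargs):
    dc = get_cube()
    try:
        # fn is assumed to take the cube handle as its first argument.
        return fn(dc, *args, **kwargs)
    finally:
        # Released on both the success and the exception path.
        release_cube(dc)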