def test_scale_size(layer_crs_geom):
    scaler = WCSScaler(layer_crs_geom, "EPSG:4326")
    scaler.to_crs("EPSG:3577")
    scaler.scale_size("x", 512)
    scaler.scale_size("y", 512)
    assert scaler.dim("x") == (512, -2407984.8524648934, 2834259.110253384)
    assert scaler.dim("y") == (512, -5195512.771063174, -936185.3115191332)
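

# The sketch below is illustrative only and is not part of the test suite: it walks the
# same WCSScaler calls that get_coverage_data() below chains together for a typical
# request (spatial trim in the subsetting CRS, reprojection to the output CRS, then a
# fixed output size).  The trim bounds and output size are assumptions chosen for
# illustration; only the asserted shape of dim() follows test_scale_size above.
def example_scaler_workflow_sketch(layer_crs_geom):
    scaler = WCSScaler(layer_crs_geom, "EPSG:4326")
    # Hypothetical spatial subset, expressed in the subsetting CRS.
    scaler.trim("x", 140.0, 141.0)
    scaler.trim("y", -36.0, -35.0)
    # Reproject the working extent, then pin the output raster size.
    scaler.to_crs("EPSG:3577")
    scaler.scale_size("x", 256)
    scaler.scale_size("y", 256)
    # get_coverage_data() feeds the size and affine into geometry.GeoBox to drive the load.
    assert scaler.dim("x")[0] == 256
    assert scaler.dim("y")[0] == 256
    assert scaler.affine() is not None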


def get_coverage_data(request):  # pylint: disable=too-many-locals, protected-access
    cfg = get_config()
    layer_name = request.coverage_id
    layer = cfg.product_index.get(layer_name)
    if not layer or not layer.wcs:
        raise WCS2Exception("Invalid coverage: %s" % layer_name,
                            WCS2Exception.NO_SUCH_COVERAGE,
                            locator="COVERAGE parameter",
                            valid_keys=list(cfg.product_index))

    with cube() as dc:
        if not dc:
            raise WCS2Exception("Database connectivity failure")
        #
        # CRS handling
        #
        native_crs = layer.native_CRS
        subsetting_crs = uniform_crs(cfg, request.subsetting_crs or native_crs)

        if subsetting_crs not in cfg.published_CRSs:
            raise WCS2Exception("Invalid subsettingCrs: %s" % subsetting_crs,
                                WCS2Exception.SUBSETTING_CRS_NOT_SUPPORTED,
                                locator=subsetting_crs,
                                valid_keys=list(cfg.published_CRSs))

        output_crs = uniform_crs(cfg,
                                 request.output_crs or subsetting_crs or native_crs)

        if output_crs not in cfg.published_CRSs:
            raise WCS2Exception("Invalid outputCrs: %s" % output_crs,
                                WCS2Exception.OUTPUT_CRS_NOT_SUPPORTED,
                                locator=output_crs,
                                valid_keys=list(cfg.published_CRSs))

        #
        # Subsetting/Scaling
        #
        scaler = WCSScaler(layer, subsetting_crs)
        times = layer.ranges["times"]

        subsets = request.subsets

        # Reject requests that subset the same dimension more than once.
        if len(subsets) != len(set(subset.dimension.lower() for subset in subsets)):
            dimensions = [subset.dimension.lower() for subset in subsets]
            duplicate_dimensions = [
                item
                for item, count in collections.Counter(dimensions).items()
                if count > 1
            ]
            raise WCS2Exception("Duplicate dimension%s: %s" % (
                                    's' if len(duplicate_dimensions) > 1 else '',
                                    ', '.join(duplicate_dimensions)),
                                WCS2Exception.INVALID_SUBSETTING,
                                locator=','.join(duplicate_dimensions))

        for subset in subsets:
            dimension = subset.dimension.lower()
            if dimension == 'time':
                # Temporal subsets are resolved against the layer's published times.
                if isinstance(subset, Trim):
                    low = parse(subset.low).date() if subset.low is not None else None
                    high = parse(subset.high).date() if subset.high is not None else None
                    if low is not None:
                        times = [time for time in times if time >= low]
                    if high is not None:
                        times = [time for time in times if time <= high]
                elif isinstance(subset, Slice):
                    point = parse(subset.point).date()
                    times = [point]
            else:
                # Spatial subsets are delegated to the scaler, in the subsetting CRS.
                try:
                    if isinstance(subset, Trim):
                        scaler.trim(dimension, subset.low, subset.high)
                    elif isinstance(subset, Slice):
                        scaler.slice(dimension, subset.point)
                except WCSScalerUnknownDimension:
                    raise WCS2Exception('Invalid subsetting axis %s' % subset.dimension,
                                        WCS2Exception.INVALID_AXIS_LABEL,
                                        locator=subset.dimension)

        #
        # Transform spatial extent to the output CRS.
        #
        scaler.to_crs(output_crs)

        #
        # Scaling
        #
        scales = request.scales

        # Reject requests that scale the same axis more than once.
        if len(scales) != len(set(subset.axis.lower() for subset in scales)):
            axes = [subset.axis.lower() for subset in scales]
            duplicate_axes = [
                item
                for item, count in collections.Counter(axes).items()
                if count > 1
            ]
            raise WCS2Exception('Duplicate scales for ax%ss: %s' % (
                                    'i' if len(duplicate_axes) == 1 else 'e',
                                    ', '.join(duplicate_axes)),
                                WCS2Exception.INVALID_SCALE_FACTOR,
                                locator=','.join(duplicate_axes))

        for scale in scales:
            axis = scale.axis.lower()
            if axis in ('time', 'k'):
                raise WCS2Exception('Cannot scale axis %s' % scale.axis,
                                    WCS2Exception.INVALID_SCALE_FACTOR,
                                    locator=scale.axis)
            else:
                if isinstance(scale, ScaleAxis):
                    scaler.scale_axis(axis, scale.factor)
                elif isinstance(scale, ScaleSize):
                    scaler.scale_size(axis, scale.size)
                elif isinstance(scale, ScaleExtent):
                    scaler.scale_extent(axis, scale.low, scale.high)

        #
        # Rangesubset
        #
        band_labels = layer.band_idx.band_labels()
        if request.range_subset:
            bands = []
            for range_subset in request.range_subset:
                if isinstance(range_subset, str):
                    # A single band, requested by label.
                    if range_subset not in band_labels:
                        raise WCS2Exception('No such field %s' % range_subset,
                                            WCS2Exception.NO_SUCH_FIELD,
                                            locator=range_subset,
                                            valid_keys=band_labels)
                    bands.append(range_subset)
                else:
                    # A band interval, requested as a start/end pair of labels.
                    if range_subset.start not in band_labels:
                        raise WCS2Exception(
                            'No such field %s' % range_subset.start,
                            WCS2Exception.ILLEGAL_FIELD_SEQUENCE,
                            locator=range_subset.start,
                            valid_keys=band_labels)
                    if range_subset.end not in band_labels:
                        raise WCS2Exception(
                            'No such field %s' % range_subset.end,
                            WCS2Exception.ILLEGAL_FIELD_SEQUENCE,
                            locator=range_subset.end,
                            valid_keys=band_labels)

                    start = band_labels.index(range_subset.start)
                    end = band_labels.index(range_subset.end)
                    bands.extend(
                        band_labels[start:(end + 1) if end > start else (end - 1)])
        else:
            bands = layer.wcs_default_bands  # TODO: standard says differently

        #
        # Format handling
        #
        if not request.format:
            fmt = cfg.wcs_formats_by_name[layer.native_format]
        else:
            try:
                fmt = cfg.wcs_formats_by_mime[request.format]
            except KeyError:
                raise WCS2Exception("Unsupported format: %s" % request.format,
                                    WCS2Exception.INVALID_PARAMETER_VALUE,
                                    locator="FORMAT",
                                    valid_keys=list(cfg.wcs_formats_by_mime))

        if len(times) > 1 and not fmt.multi_time:
            raise WCS2Exception(
                "Format does not support multi-time datasets - "
                "either constrain the time dimension or choose a different format",
                WCS2Exception.INVALID_SUBSETTING,
                locator="FORMAT or SUBSET")

        affine = scaler.affine()
        geobox = geometry.GeoBox(scaler.size.x, scaler.size.y,
                                 affine, cfg.crs(output_crs))

        stacker = DataStacker(layer, geobox, times, bands=bands)
        n_datasets = stacker.datasets(dc.index, mode=MVSelectOpts.COUNT)

        if layer.max_datasets_wcs > 0 and n_datasets > layer.max_datasets_wcs:
            raise WCS2Exception(
                "This request processes too much data to be served in a reasonable amount of time. "
                "Please reduce the bounds of your request and try again. "
"(max: %d, this request requires: %d)" % (layer.max_datasets_wcs, n_datasets)) elif n_datasets == 0: raise WCS2Exception( "The requested spatio-temporal subsets return no data.", WCS2Exception.INVALID_SUBSETTING, http_response=404) datasets = stacker.datasets(dc.index) if fmt.multi_time and len(times) > 1: # Group by solar day group_by = datacube.api.query.query_group_by(time=times, group_by='solar_day') datasets = dc.group_datasets(datasets, group_by) output = stacker.data(datasets, skip_corrections=True) # Clean extent flag band from output for k, v in output.data_vars.items(): if k not in bands: output = output.drop_vars([k]) # # TODO: configurable # if fmt.mime == 'image/geotiff': output = fmt.renderer(request.version)(request, output, output_crs, layer, scaler.size.x, scaler.size.y, affine) else: output = fmt.renderer(request.version)(request, output, output_crs) headers = { "Content-Type": fmt.mime, 'content-disposition': f'attachment; filename={request.coverage_id}.{fmt.extension}', } headers.update(layer.wcs_cache_rules.cache_headers(n_datasets)) return output, headers