def __init__(self, args):
    """Parse the GET parameters common to all WMS request types."""
    # Negotiated WMS protocol version.
    self.version = get_arg(args, "version", "WMS version",
                           permitted_values=['1.1.1', '1.3.0'])
    # WMS 1.1.1 calls the CRS parameter "SRS"; 1.3.0 renamed it to "CRS".
    crs_arg = "srs" if self.version == '1.1.1' else "crs"
    self.crsid = get_arg(args, crs_arg, "Coordinate Reference System",
                         errcode=WMSException.INVALID_CRS,
                         permitted_values=get_service_cfg().published_CRSs.keys())
    self.crs = geometry.CRS(self.crsid)
    # Requested layer(s).
    self.product = self.get_product(args)
    self.raw_product = self.get_raw_product(args)
    # Spatial extent: polygon and geobox from BBOX, height and width parameters.
    self.geometry = _get_polygon(args, self.crs)
    self.geobox = _get_geobox(args, self.crs)
    # Time parameter.
    self.time = get_time(args, self.product, self.raw_product)
    # Hook for subclasses to parse request-specific parameters.
    self.method_specific_init(args)
def img_coords_to_geopoint(geobox, i, j):
    """Convert image pixel indices (i, j) into a point in the geobox's CRS."""
    crs_def = get_service_cfg().published_CRSs[geobox.crs.crs_str]
    x_val = geobox.coordinates[crs_def["horizontal_coord"]].values[int(i)]
    y_val = geobox.coordinates[crs_def["vertical_coord"]].values[int(j)]
    return geometry.point(x_val, y_val, geobox.crs)
def desc_coverages(args):
    """Handle a WCS 1.0.0 DescribeCoverage request.

    Note: Only WCS v1.0.0 is fully supported at this stage, so no version
    negotiation is necessary.
    """
    # Extract layer metadata from Datacube.
    platforms = get_layers(refresh=True)
    requested = args.get("coverage")
    if requested:
        # Explicit comma-separated coverage list: every name must resolve.
        products = []
        for name in requested.split(","):
            prod = platforms.product_index.get(name)
            if not prod:
                raise WCS1Exception("Invalid coverage: %s" % name,
                                    WCS1Exception.COVERAGE_NOT_DEFINED,
                                    locator="Coverage parameter")
            products.append(prod)
    else:
        # No coverage list supplied: describe every published product.
        products = [p for plat in platforms for p in plat.products]
    return (render_template("wcs_desc_coverage.xml",
                            service=get_service_cfg(),
                            products=products),
            200,
            resp_headers({
                "Content-Type": "application/xml",
                "Cache-Control": "max-age=10"
            }))
def get_capabilities(args):
    """Handle a WMTS GetCapabilities request.

    Note: Only WMS v1.0.0 exists at this stage, so no version negotiation
    is necessary.
    """
    # TODO: Handle updatesequence request parameter for cache consistency.
    # Extract layer metadata from Datacube.
    platforms = get_layers(refresh=True)
    service_cfg = get_service_cfg()
    url = args.get('Host', args['url_root'])
    base_url = get_service_base_url(service_cfg.allowed_urls, url)
    section = args.get("section")
    if section:
        section = section.lower()
    # Capability sections to render, keyed by their request token.
    show = {
        "serviceidentification": False,
        "serviceprovider": False,
        "operationsmetadata": False,
        "contents": False,
        "themes": False,
    }
    if section is None:
        # No section requested: render everything.
        show = {key: True for key in show}
    else:
        for token in section.split(","):
            if token == "all":
                show = {key: True for key in show}
            elif token in show:
                show[token] = True
            else:
                raise WMTSException("Invalid section: %s" % section,
                                    WMTSException.INVALID_PARAMETER_VALUE,
                                    locator="Section parameter")
    return (render_template("wmts_capabilities.xml",
                            service=service_cfg,
                            platforms=platforms,
                            base_url=base_url,
                            show_service_id=show["serviceidentification"],
                            show_service_provider=show["serviceprovider"],
                            show_ops_metadata=show["operationsmetadata"],
                            show_contents=show["contents"],
                            show_themes=show["themes"],
                            webmerc_ss=WebMercScaleSet),
            200,
            resp_headers({
                "Content-Type": "application/xml",
                "Cache-Control": "no-cache,max-age=0"
            }))
def get_capabilities(args):
    """Handle a WMS GetCapabilities request.

    Note: Only WMS v1.3.0 is fully supported at this stage, so no version
    negotiation is necessary.
    """
    # TODO: Handle updatesequence request parameter for cache consistency.
    # Extract layer metadata from Datacube.
    platforms = get_layers(refresh=True)
    xml = render_template("wms_capabilities.xml",
                          service=get_service_cfg(),
                          platforms=platforms)
    return xml, 200, resp_headers({
        "Content-Type": "application/xml",
        "Cache-Control": "no-cache,max-age=0"
    })
def get_tiff(req, data): """Uses rasterio MemoryFiles in order to return a streamable GeoTiff response""" # Copied from CEOS. Does not seem to support multi-time dimension data - is this even possible in GeoTiff? supported_dtype_map = { 'uint8': 1, 'uint16': 2, 'int16': 3, 'uint32': 4, 'int32': 5, 'float32': 6, 'float64': 7, 'complex': 9, 'complex64': 10, 'complex128': 11, } dtype_list = [data[array].dtype for array in data.data_vars] dtype = str(max(dtype_list, key=lambda d: supported_dtype_map[str(d)])) data = data.astype(dtype) svc = get_service_cfg() xname = svc.published_CRSs[req.request_crsid]["horizontal_coord"] yname = svc.published_CRSs[req.request_crsid]["vertical_coord"] nodata = 0 for band in data.data_vars: nodata = req.product.band_idx.nodata_val(band) with MemoryFile() as memfile: #pylint: disable=protected-access, bad-continuation with memfile.open(driver="GTiff", width=data.dims[xname], height=data.dims[yname], count=len(data.data_vars), transform=req.affine, crs=req.response_crsid, nodata=nodata, tiled=True, compress="lzw", interleave="band", dtype=dtype) as dst: for idx, band in enumerate(data.data_vars, start=1): dst.write(data[band].values, idx) dst.set_band_description(idx, req.product.band_idx.band_label(band)) dst.update_tags(idx, STATISTICS_MINIMUM=data[band].values.min()) dst.update_tags(idx, STATISTICS_MAXIMUM=data[band].values.max()) dst.update_tags(idx, STATISTICS_MEAN=data[band].values.mean()) dst.update_tags(idx, STATISTICS_STDDEV=data[band].values.std()) return memfile.read()
def get_capabilities(args):
    """Handle a WMS GetCapabilities request.

    Note: Only WMS v1.3.0 is fully supported at this stage, so no version
    negotiation is necessary.
    """
    # TODO: Handle updatesequence request parameter for cache consistency.
    # Extract layer metadata from Datacube.
    platforms = get_layers(refresh=True)
    service_cfg = get_service_cfg()
    # Resolve the advertised base URL against the configured whitelist.
    host = args.get('Host', args['url_root'])
    base_url = get_service_base_url(service_cfg.allowed_urls, host)
    xml = render_template("wms_capabilities.xml",
                          service=service_cfg,
                          platforms=platforms,
                          base_url=base_url)
    return (xml,
            200,
            resp_headers({
                "Content-Type": "application/xml",
                "Cache-Control": "max-age=10"
            }))
def legend_graphic(args):
    """Handle a WMS GetLegendGraphic request.

    Returns a legend image built from the product's configured styles, or
    None when no legend is generated here (a style name was requested, no
    legend config exists, or an external legend URL is configured).
    """
    params = GetLegendGraphicParameters(args)
    svc_cfg = get_service_cfg()
    img = None
    if not params.style_name:
        product = params.product
        legend_config = product.legend
        # Only build a legend locally when there is a legend config and no
        # external legend URL is set.
        if legend_config is not None and not legend_config.get('url', None):
            styles = [
                product.style_index[name]
                for name in legend_config.get('styles', [])
            ]
            img = create_legends_from_styles(product, styles)
    return img
def get_tiff(req, data): """Uses rasterio MemoryFiles in order to return a streamable GeoTiff response""" # Copied from CEOS. Does not seem to support multi-time dimension data - is this even possible in GeoTiff? supported_dtype_map = { 'uint8': 1, 'uint16': 2, 'int16': 3, 'uint32': 4, 'int32': 5, 'float32': 6, 'float64': 7, 'complex': 9, 'complex64': 10, 'complex128': 11, } dtype_list = [data[array].dtype for array in data.data_vars] dtype = str(max(dtype_list, key=lambda d: supported_dtype_map[str(d)])) data = data.astype(dtype) svc = get_service_cfg() xname = svc.published_CRSs[req.request_crsid]["horizontal_coord"] yname = svc.published_CRSs[req.request_crsid]["vertical_coord"] with MemoryFile() as memfile: #pylint: disable=protected-access, bad-continuation with memfile.open(driver="GTiff", width=data.dims[xname], height=data.dims[yname], count=len(data.data_vars), transform=_get_transform_from_xr(xname, yname, data), crs=req.response_crsid, dtype=dtype) as dst: for idx, band in enumerate(data.data_vars, start=1): dst.write(data[band].values, idx) # As of rasterio 1.0.2 the nodatavals property is not writable # as suggested in the docs, use the deprecated function dst._set_nodatavals([ req.product.nodata_dict[band] if band in req.product.nodata_dict else 0 for band in data.data_vars ]) return memfile.read()
def get_capabilities(args):
    """Handle a WCS 1.0.0 GetCapabilities request.

    Note: Only WCS v1.0.0 is fully supported at this stage, so no version
    negotiation is necessary.
    """
    # TODO: Handle updatesequence request parameter for cache consistency.
    section = args.get("section")
    if section:
        section = section.lower()
    # Map each recognised section path to the capability blocks it enables.
    _ALL = ("service", "capability", "contentmetadata")
    section_map = {
        None: _ALL,
        "/": _ALL,
        "/wcs_capabilities/service": ("service",),
        "/wcs_capabilities/capability": ("capability",),
        "/wcs_capabilities/contentmetadata": ("contentmetadata",),
    }
    if section not in section_map:
        raise WCS1Exception("Invalid section: %s" % section,
                            WCS1Exception.INVALID_PARAMETER_VALUE,
                            locator="Section parameter")
    enabled = section_map[section]
    show_service = "service" in enabled
    show_capability = "capability" in enabled
    show_content_metadata = "contentmetadata" in enabled
    # Extract layer metadata from Datacube.
    platforms = get_layers(refresh=True)
    service_cfg = get_service_cfg()
    url = args.get('Host', args['url_root'])
    base_url = get_service_base_url(service_cfg.allowed_urls, url)
    return (render_template("wcs_capabilities.xml",
                            show_service=show_service,
                            show_capability=show_capability,
                            show_content_metadata=show_content_metadata,
                            service=service_cfg,
                            platforms=platforms,
                            base_url=base_url),
            200,
            resp_headers({
                "Content-Type": "application/xml",
                "Cache-Control": "no-cache, max-age=0"
            }))
def get_coverage(args):
    """Handle a WCS 2.x GetCoverage request (response body not yet implemented)."""
    decoder = WCS20GetCoverageKVPDecoder(args)
    try:
        coverage_id = decoder.coverage_id
    except KeyError:
        raise WCS2Exception("Missing coverageid parameter",
                            locator="coverageid")
    svc_cfg = get_service_cfg(refresh=True)
    layers = get_layers(refresh=True)
    product = layers.product_index.get(coverage_id)
    if not product:
        raise WCS2Exception("Invalid coverage: %s" % coverage_id,
                            WCS2Exception.COVERAGE_NOT_DEFINED,
                            locator="COVERAGE parameter")
    # Validate the requested format, falling back to the configured native
    # format when none was supplied.
    requested_format = decoder.format
    if requested_format:
        if requested_format not in svc_cfg.wcs_formats:
            raise WCS2Exception("Unsupported format: %s" % requested_format,
                                WCS2Exception.INVALID_PARAMETER_VALUE,
                                locator="FORMAT parameter")
    elif not svc_cfg.native_wcs_format:
        raise WCS2Exception("Missing parameter format 'format'",
                            WCS2Exception.MISSING_PARAMETER_VALUE,
                            locator="FORMAT parameter")
    fmt_cfg = svc_cfg.wcs_formats[requested_format or svc_cfg.native_wcs_format]
    return ('',
            200,
            resp_headers({
                "Content-Type": "application/xml",
                "Cache-Control": "no-cache, max-age=0"
            }))
def ogc_impl():
    """Shared Flask entry point dispatching WMS and WCS requests."""
    nocase_args = lower_get_args()
    nocase_args['referer'] = request.headers.get('Referer', None)
    nocase_args['origin'] = request.headers.get('Origin', None)
    nocase_args['requestid'] = request.environ.get("FLASK_REQUEST_ID")
    service = nocase_args.get("service", "").upper()
    svc_cfg = get_service_cfg()
    try:
        if service == "WMS":
            # WMS operation Map
            if not svc_cfg.wms:
                raise WMSException("Invalid service", locator="Service parameter")
            return handle_wms(nocase_args)
        if service == "WCS":
            # WCS operation Map
            if not svc_cfg.wcs:
                raise WCS1Exception("Invalid service", locator="Service parameter")
            return handle_wcs(nocase_args)
        # Should we return a WMS or WCS exception if there is no service specified?
        # Defaulting to WMS because that's what we already have.
        raise WMSException("Invalid service", locator="Service parameter")
    except OGCException as e:
        # Well-formed OGC errors render their own XML response.
        return e.exception_response()
    except Exception as e:  # pylint: disable=broad-except
        # Boundary handler: wrap unexpected failures in a service-appropriate
        # OGC exception carrying the traceback.
        tb = sys.exc_info()[2]
        eclass = WCS1Exception if service == "WCS" else WMSException
        ogc_e = eclass("Unexpected server error: %s" % str(e), http_response=500)
        return ogc_e.exception_response(traceback=traceback.extract_tb(tb))
def desc_coverages(args):
    """Handle a WCS 2.x DescribeCoverage request."""
    try:
        coverage_ids = [s.strip() for s in args['coverageid'].split(',')]
    except KeyError:
        raise WCS2Exception("Missing coverageid parameter",
                            locator="coverageid")
    svc_cfg = get_service_cfg(refresh=True)
    layers = get_layers(refresh=True)
    # Every requested coverage id must resolve to a published product.
    products = []
    for cid in coverage_ids:
        prod = layers.product_index.get(cid)
        if not prod:
            raise WCS2Exception("Invalid coverage: %s" % cid,
                                WCS2Exception.COVERAGE_NOT_DEFINED,
                                locator="Coverage parameter")
        products.append(prod)
    # Build a coverage object for each resolved product.
    coverages = [get_coverage_object(svc_cfg, prod) for prod in products]
    encoder = WCS21XMLEncoder()
    xml = encoder.serialize(encoder.encode_coverage_descriptions(coverages))
    return (xml,
            200,
            resp_headers({
                "Content-Type": "application/xml",
                "Cache-Control": "no-cache, max-age=0"
            }))
def feature_info(args):
    """Handle a WMS GetFeatureInfo request.

    Returns a (body, status, headers) tuple whose body is a GeoJSON
    FeatureCollection with a single Feature describing the pixel at image
    coordinates (i, j): lat/lon, per-band values, derived style values,
    PQ flags, available dates and data links.
    """
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetFeatureInfoParameters(args)
    # Prepare to extract feature info
    stacker = DataStacker(params.product, params.geobox, params.time)
    feature_json = {}
    # --- Begin code section requiring datacube.
    service_cfg = get_service_cfg()
    dc = get_cube()
    try:
        geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j)
        # All datasets intersecting the clicked point (data + PQ mask datasets).
        datasets = stacker.datasets(dc.index, all_time=True, point=geo_point)
        pq_datasets = stacker.datasets(dc.index, mask=True, all_time=False, point=geo_point)
        h_coord = service_cfg.published_CRSs[params.crsid]["horizontal_coord"]
        v_coord = service_cfg.published_CRSs[params.crsid]["vertical_coord"]
        # Kwargs selecting the single requested pixel from loaded data.
        isel_kwargs = {h_coord: [params.i], v_coord: [params.j]}
        if not datasets:
            pass
        else:
            available_dates = set()
            drill = {}
            for d in datasets:
                # Dataset's date in the product's local timezone offset.
                idx_date = (d.center_time + timedelta(hours=params.product.time_zone)).date()
                available_dates.add(idx_date)
                pixel_ds = None
                # Only extract full pixel info once, for the requested date.
                if idx_date == params.time and "lon" not in feature_json:
                    data = stacker.data([d], skip_corrections=True)
                    # Use i,j image coordinates to extract data pixel from dataset, and
                    # convert to lat/long geographic coordinates
                    if service_cfg.published_CRSs[params.crsid]["geographic"]:
                        # Geographic coordinate systems (e.g. EPSG:4326/WGS-84) are already in lat/long
                        feature_json["lat"] = data.latitude[params.j].item()
                        feature_json["lon"] = data.longitude[params.i].item()
                        pixel_ds = data.isel(**isel_kwargs)
                    else:
                        # Non-geographic coordinate systems need to be projected onto a geographic
                        # coordinate system. Why not use EPSG:4326?
                        # Extract coordinates in CRS
                        data_x = getattr(data, h_coord)
                        data_y = getattr(data, v_coord)
                        x = data_x[params.i].item()
                        y = data_y[params.j].item()
                        pt = geometry.point(x, y, params.crs)
                        # Project to EPSG:4326
                        crs_geo = geometry.CRS("EPSG:4326")
                        ptg = pt.to_crs(crs_geo)
                        # Capture lat/long coordinates
                        feature_json["lon"], feature_json["lat"] = ptg.coords[0]
                        # Extract data pixel
                        pixel_ds = data.isel(**isel_kwargs)
                    # Get accurate timestamp from dataset
                    feature_json["time"] = d.center_time.strftime("%Y-%m-%d %H:%M:%S UTC")
                    feature_json["bands"] = {}
                    # Collect raw band values for pixel
                    for band in stacker.needed_bands():
                        ret_val = band_val = pixel_ds[band].item()
                        if band_val == pixel_ds[band].nodata:
                            feature_json["bands"][band] = "n/a"
                        else:
                            # Bitflag bands are reported as their set-flag descriptions
                            # rather than the raw integer.
                            if hasattr(pixel_ds[band], 'flags_definition'):
                                flag_def = pixel_ds[band].flags_definition
                                flag_dict = mask_to_dict(flag_def, band_val)
                                ret_val = [
                                    flag_def[k]['description']
                                    for k in filter(flag_dict.get, flag_dict)
                                ]
                            feature_json["bands"][band] = ret_val
                    # Evaluate style index functions (e.g. NDVI) at the pixel.
                    for k, v in filter(
                            lambda kv: hasattr(kv[1], 'index_function'),
                            params.product.style_index.items()):
                        if v.index_function is None:
                            continue
                        # Skip styles whose inputs are nodata at this pixel.
                        vals_nodata = [
                            pixel_ds[b] == pixel_ds[b].nodata
                            for b in v.needed_bands
                        ]
                        if any(vals_nodata):
                            continue
                        value = v.index_function(pixel_ds).item()
                        # EAFP: create the sub-dict on first derived value.
                        try:
                            feature_json["band_derived"][k] = value
                        except KeyError:
                            feature_json["band_derived"] = {}
                            feature_json["band_derived"][k] = value
                # Per-date "drill" values for configured bands (all dates,
                # not just the requested one).
                if params.product.band_drill:
                    if pixel_ds is None:
                        data = stacker.data([d], skip_corrections=True)
                        pixel_ds = data.isel(**isel_kwargs)
                    drill_section = {}
                    for band in params.product.band_drill:
                        band_val = pixel_ds[band].item()
                        if band_val == pixel_ds[band].nodata:
                            drill_section[band] = "n/a"
                        else:
                            drill_section[band] = pixel_ds[band].item()
                    drill[idx_date.strftime("%Y-%m-%d")] = drill_section
            if drill:
                feature_json["time_drill"] = drill
            feature_json["datasets_read"] = len(datasets)
            # Accumulate PQ flags for the requested date, masked to the
            # product's "interesting" flag bits.
            my_flags = 0
            pqdi = -1
            for pqd in pq_datasets:
                pqdi += 1
                idx_date = (pqd.center_time + timedelta(hours=params.product.time_zone)).date()
                if idx_date == params.time:
                    pq_data = stacker.data([pqd], mask=True)
                    pq_pixel_ds = pq_data.isel(**isel_kwargs)
                    # PQ flags
                    m = params.product.pq_product.measurements[params.product.pq_band]
                    flags = pq_pixel_ds[params.product.pq_band].item()
                    if not flags & ~params.product.info_mask:
                        my_flags = my_flags | flags
                    else:
                        continue
                    feature_json["flags"] = {}
                    for mk, mv in m["flags_definition"].items():
                        if mk in params.product.ignore_flags_info:
                            continue
                        bits = mv["bits"]
                        values = mv["values"]
                        # Multi-bit flag definitions are skipped.
                        if not isinstance(bits, int):
                            continue
                        flag = 1 << bits
                        if my_flags & flag:
                            val = values['1']
                        else:
                            val = values['0']
                        feature_json["flags"][mk] = val
            lads = list(available_dates)
            lads.sort()
            feature_json["data_available_for_dates"] = [d.strftime("%Y-%m-%d") for d in lads]
            feature_json["data_links"] = sorted(get_s3_browser_uris(datasets))
        release_cube(dc)
    except Exception as e:
        # Return the cube to the pool on any failure, then re-raise.
        release_cube(dc)
        raise e
    # --- End code section requiring datacube.
    result = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": feature_json
        }]
    }
    return json.dumps(result), 200, resp_headers({"Content-Type": "application/json"})
def feature_info(args):
    """Handle a WMS GetFeatureInfo request.

    Returns a (body, status, headers) tuple whose body is a GeoJSON
    FeatureCollection with a single Feature describing the pixel at image
    coordinates (i, j): lat/lon, per-band values, derived style values,
    PQ flags, available dates and data links.
    """
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetFeatureInfoParameters(args)
    feature_json = {}
    geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j)
    # shrink geobox to point
    # Prepare to extract feature info
    if geobox_is_point(params.geobox):
        geo_point_geobox = params.geobox
    else:
        geo_point_geobox = datacube.utils.geometry.GeoBox.from_geopolygon(
            geo_point, params.geobox.resolution, crs=params.geobox.crs)
    stacker = DataStacker(params.product, geo_point_geobox, params.time)
    # --- Begin code section requiring datacube.
    service_cfg = get_service_cfg()
    with cube() as dc:
        # All datasets intersecting the clicked point (data + PQ mask datasets).
        datasets = stacker.datasets(dc.index, all_time=True, point=geo_point)
        pq_datasets = stacker.datasets(dc.index, mask=True, all_time=False, point=geo_point)
        # Taking the data as a single point so our indexes into the data should be 0,0
        h_coord = service_cfg.published_CRSs[params.crsid]["horizontal_coord"]
        v_coord = service_cfg.published_CRSs[params.crsid]["vertical_coord"]
        s3_bucket = service_cfg.s3_bucket
        s3_url = service_cfg.s3_url
        isel_kwargs = {h_coord: 0, v_coord: 0}
        if datasets:
            # Group datasets by their local calendar date at the clicked point.
            dataset_date_index = {}
            tz = None
            for ds in datasets:
                if tz is None:
                    # Resolve the timezone once from the point's lat/lon.
                    crs_geo = geometry.CRS("EPSG:4326")
                    ptg = geo_point.to_crs(crs_geo)
                    tz = tz_for_coord(ptg.coords[0][0], ptg.coords[0][1])
                ld = local_date(ds, tz=tz)
                if ld in dataset_date_index:
                    dataset_date_index[ld].append(ds)
                else:
                    dataset_date_index[ld] = [ds]
            # Group datasets by time, load only datasets that match the idx_date
            available_dates = dataset_date_index.keys()
            ds_at_time = dataset_date_index.get(params.time, [])
            _LOG.info("%d datasets, %d at target date", len(datasets), len(ds_at_time))
            if len(ds_at_time) > 0:
                pixel_ds = None
                data = stacker.data(
                    ds_at_time,
                    skip_corrections=True,
                    manual_merge=params.product.data_manual_merge,
                    fuse_func=params.product.fuse_func)
                # Non-geographic coordinate systems need to be projected onto a geographic
                # coordinate system. Why not use EPSG:4326?
                # Extract coordinates in CRS
                data_x = getattr(data, h_coord)
                data_y = getattr(data, v_coord)
                x = data_x[isel_kwargs[h_coord]].item()
                y = data_y[isel_kwargs[v_coord]].item()
                pt = geometry.point(x, y, params.crs)
                if params.product.multi_product:
                    # Report which underlying product/platform supplied the data.
                    feature_json["source_product"] = "%s (%s)" % (
                        ds_at_time[0].type.name,
                        ds_at_time[0].metadata_doc["platform"]["code"])
                # Project to EPSG:4326
                crs_geo = geometry.CRS("EPSG:4326")
                ptg = pt.to_crs(crs_geo)
                # Capture lat/long coordinates
                feature_json["lon"], feature_json["lat"] = ptg.coords[0]
                # Extract data pixel
                pixel_ds = data.isel(**isel_kwargs)
                # Get accurate timestamp from dataset
                feature_json["time"] = dataset_center_time(
                    ds_at_time[0]).strftime("%Y-%m-%d %H:%M:%S UTC")
                # Collect raw band values for pixel and derived bands from styles
                feature_json["bands"] = _make_band_dict(
                    params.product, pixel_ds, stacker.needed_bands())
                derived_band_dict = _make_derived_band_dict(
                    pixel_ds, params.product.style_index)
                if derived_band_dict:
                    feature_json["band_derived"] = derived_band_dict
                # Optional per-product hook to add custom feature-info fields.
                if callable(params.product.feature_info_include_custom):
                    additional_data = params.product.feature_info_include_custom(
                        feature_json["bands"])
                    feature_json.update(additional_data)
            # Accumulate PQ flags for the requested date, masked to the
            # product's "interesting" flag bits.
            my_flags = 0
            for pqd in pq_datasets:
                idx_date = dataset_center_time(pqd)
                if idx_date == params.time:
                    pq_data = stacker.data([pqd], mask=True)
                    pq_pixel_ds = pq_data.isel(**isel_kwargs)
                    # PQ flags
                    m = params.product.pq_product.measurements[params.product.pq_band]
                    flags = pq_pixel_ds[params.product.pq_band].item()
                    if not flags & ~params.product.info_mask:
                        my_flags = my_flags | flags
                    else:
                        continue
                    feature_json["flags"] = {}
                    for mk, mv in m["flags_definition"].items():
                        if mk in params.product.ignore_flags_info:
                            continue
                        bits = mv["bits"]
                        values = mv["values"]
                        # Multi-bit flag definitions are skipped.
                        if not isinstance(bits, int):
                            continue
                        flag = 1 << bits
                        if my_flags & flag:
                            val = values['1']
                        else:
                            val = values['0']
                        feature_json["flags"][mk] = val
            feature_json["data_available_for_dates"] = [
                d.strftime("%Y-%m-%d") for d in sorted(available_dates)
            ]
            feature_json["data_links"] = sorted(
                get_s3_browser_uris(ds_at_time, s3_url, s3_bucket))
            if params.product.feature_info_include_utc_dates:
                feature_json["data_available_for_utc_dates"] = sorted(
                    d.center_time.strftime("%Y-%m-%d") for d in datasets)
    # --- End code section requiring datacube.
    result = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": feature_json
        }]
    }
    return json.dumps(result), 200, resp_headers({"Content-Type": "application/json"})
def get_coverage_data(req):
    #pylint: disable=too-many-locals, protected-access
    """Load the data for a WCS GetCoverage request.

    Returns an xarray.Dataset over the requested geobox/times.  When no
    datasets match, returns a Dataset filled with each band's nodata value.

    Raises WCS1Exception when the request would read more than the
    product's configured max_datasets_wcs dataset limit.
    """
    dc = get_cube()
    # BUG FIX: the cube was previously only released on the success paths,
    # leaking it whenever an exception (e.g. the dataset limit below) was
    # raised.  try/finally guarantees release on every path.
    try:
        datasets = []
        for t in req.times:
            # IF t was passed to the datasets method instead of the stacker
            # constructor, we could use the one stacker.
            stacker = DataStacker(req.product, req.geobox, t, bands=req.bands)
            t_datasets = stacker.datasets(dc.index)
            if not t_datasets:
                # No matching data for this date
                continue
            datasets.extend(t_datasets)
        if not datasets:
            # TODO: Return an empty coverage file with full metadata?
            # NOTE(review): `stacker` here is the instance left over from the
            # last loop iteration; if req.times were empty this would raise
            # NameError - confirm req.times is always non-empty upstream.
            extents = dc.load(dask_chunks={},
                              product=req.product.product.name,
                              geopolygon=req.geobox.extent,
                              time=stacker._time)
            svc = get_service_cfg()
            x_range = (req.minx, req.maxx)
            y_range = (req.miny, req.maxy)
            xname = svc.published_CRSs[req.request_crsid]["horizontal_coord"]
            yname = svc.published_CRSs[req.request_crsid]["vertical_coord"]
            # Use the coordinate values datacube produced if present,
            # otherwise synthesise evenly spaced coordinates over the bbox.
            if xname in extents:
                xvals = extents[xname]
            else:
                xvals = numpy.linspace(x_range[0], x_range[1], num=req.width)
            if yname in extents:
                yvals = extents[yname]
            else:
                yvals = numpy.linspace(y_range[0], y_range[1], num=req.height)
            # Build nodata-filled arrays with the dimension order the
            # request CRS expects.
            if svc.published_CRSs[req.request_crsid]["vertical_coord_first"]:
                nparrays = {
                    band: ((yname, xname),
                           numpy.full((len(yvals), len(xvals)),
                                      req.product.nodata_dict[band]))
                    for band in req.bands
                }
            else:
                nparrays = {
                    band: ((xname, yname),
                           numpy.full((len(xvals), len(yvals)),
                                      req.product.nodata_dict[band]))
                    for band in req.bands
                }
            return xarray.Dataset(nparrays,
                                  coords={
                                      xname: xvals,
                                      yname: yvals,
                                  }).astype("int16")
        if req.product.max_datasets_wcs > 0 and len(datasets) > req.product.max_datasets_wcs:
            # BUG FIX: added the missing spaces between the concatenated
            # sentences of this message.
            raise WCS1Exception(
                "This request processes too much data to be served in a reasonable amount of time. "
                "Please reduce the bounds of your request and try again. "
                "(max: %d, this request requires: %d)" %
                (req.product.max_datasets_wcs, len(datasets)))
        if req.format["multi-time"] and len(req.times) > 1:
            # Group by solar day
            group_by = datacube.api.query.query_group_by(time=req.times, group_by='solar_day')
            datasets = dc.group_datasets(datasets, group_by)
        stacker = DataStacker(req.product, req.geobox, req.times[0], bands=req.bands)
        return stacker.data(datasets, skip_corrections=True)
    finally:
        # Always return the datacube handle to the pool.
        release_cube(dc)
def __init__(self, args):
    """Parse and validate the arguments of a WCS 1.0.0 GetCoverage request.

    Populates product, format, CRSs, bbox, times, bands, output dimensions
    and the derived extent/affine/geobox.  Raises WCS1Exception for any
    missing or invalid parameter.
    """
    self.args = args
    layers = get_layers()
    svc_cfg = get_service_cfg()

    # Argument: Coverage (required)
    if "coverage" not in args:
        raise WCS1Exception("No coverage specified",
                            WCS1Exception.MISSING_PARAMETER_VALUE,
                            locator="COVERAGE parameter")
    self.product_name = args["coverage"]
    self.product = layers.product_index.get(self.product_name)
    if not self.product:
        raise WCS1Exception("Invalid coverage: %s" % self.product_name,
                            WCS1Exception.COVERAGE_NOT_DEFINED,
                            locator="COVERAGE parameter")

    # Argument: FORMAT (required)
    if "format" not in args:
        raise WCS1Exception("No FORMAT parameter supplied",
                            WCS1Exception.MISSING_PARAMETER_VALUE,
                            locator="FORMAT parameter")
    if args["format"] not in svc_cfg.wcs_formats:
        raise WCS1Exception("Unsupported format: %s" % args["format"],
                            WCS1Exception.INVALID_PARAMETER_VALUE,
                            locator="FORMAT parameter")
    self.format = svc_cfg.wcs_formats[args["format"]]

    # Argument: (request) CRS (required)
    if "crs" not in args:
        raise WCS1Exception("No request CRS specified",
                            WCS1Exception.MISSING_PARAMETER_VALUE,
                            locator="CRS parameter")
    self.request_crsid = args["crs"]
    if self.request_crsid not in svc_cfg.published_CRSs:
        raise WCS1Exception("%s is not a supported CRS" % self.request_crsid,
                            WCS1Exception.INVALID_PARAMETER_VALUE,
                            locator="CRS parameter")
    self.request_crs = geometry.CRS(self.request_crsid)

    # Argument: response_crs (optional, defaults to the request CRS)
    if "response_crs" in args:
        self.response_crsid = args["response_crs"]
        if self.response_crsid not in svc_cfg.published_CRSs:
            # BUG FIX: previously reported the *request* CRS id in this message.
            raise WCS1Exception("%s is not a supported CRS" % self.response_crsid,
                                WCS1Exception.INVALID_PARAMETER_VALUE,
                                locator="RESPONSE_CRS parameter")
        self.response_crs = geometry.CRS(self.response_crsid)
    else:
        self.response_crsid = self.request_crsid
        self.response_crs = self.request_crs

    # Argument: BBOX.  (The spec says at least one of BBOX or TIME is
    # required; we currently require BBOX unconditionally, as a time-only
    # request has no clear meaning for this service.)
    # For WCS 1.0.0 all bboxes will be specified as minx, miny, maxx, maxy
    if "bbox" not in args:
        raise WCS1Exception("No BBOX parameter supplied",
                            WCS1Exception.MISSING_PARAMETER_VALUE,
                            locator="BBOX or TIME parameter")
    try:
        self.minx, self.miny, self.maxx, self.maxy = map(
            float, args['bbox'].split(','))
    except ValueError:
        # Narrowed from a bare except: both float() and the tuple
        # unpacking raise ValueError on malformed input.
        raise WCS1Exception("Invalid BBOX parameter",
                            WCS1Exception.INVALID_PARAMETER_VALUE,
                            locator="BBOX parameter")

    # Argument: TIME
    if self.product.wcs_sole_time:
        self.times = [parse(self.product.wcs_sole_time).date()]
    elif "time" not in args:
        # CEOS treats no supplied time argument as all time.
        # I'm really not sure what the right thing to do is, but QGIS wants us to do SOMETHING
        self.times = [self.product.ranges["times"][-1]]
    else:
        # TODO: the min/max/res format option?
        #       It's a bit underspeced. I'm not sure what the "res" would look like.
        times = args["time"].split(",")
        self.times = []
        for t in times:
            # BUG FIX: was `if times == "now"` (a list compared to a string,
            # which is always False); "now" entries are meant to be skipped,
            # not parsed as dates.
            if t == "now":
                continue
            try:
                time = parse(t).date()
                if time not in self.product.ranges["time_set"]:
                    raise WCS1Exception(
                        "Time value '%s' not a valid date for coverage %s" % (t, self.product_name),
                        WCS1Exception.INVALID_PARAMETER_VALUE,
                        locator="TIME parameter")
                self.times.append(time)
            except ValueError:
                raise WCS1Exception(
                    "Time value '%s' not a valid ISO-8601 date" % t,
                    WCS1Exception.INVALID_PARAMETER_VALUE,
                    locator="TIME parameter")
        self.times.sort()
        # NOTE(review): these checks use len(times) (raw request tokens),
        # not len(self.times) (validated dates).  split() never returns an
        # empty list, so the first branch is unreachable as written -
        # confirm whether len(self.times) was intended.
        if len(times) == 0:
            raise WCS1Exception("No valid ISO-8601 dates",
                                WCS1Exception.INVALID_PARAMETER_VALUE,
                                locator="TIME parameter")
        elif len(times) > 1 and not self.format["multi-time"]:
            raise WCS1Exception(
                "Cannot select more than one time slice with the %s format" % self.format["name"],
                WCS1Exception.INVALID_PARAMETER_VALUE,
                locator="TIME and FORMAT parameters")

    # Range constraint parameter: MEASUREMENTS
    # No default is set in the DescribeCoverage, so it is required
    # But QGIS wants us to work without one, so take default from config
    if "measurements" in args:
        bands = args["measurements"]
        self.bands = []
        for b in bands.split(","):
            try:
                self.bands.append(self.product.band_idx.band(b))
            except ProductLayerException:
                raise WCS1Exception("Invalid measurement '%s'" % b,
                                    WCS1Exception.INVALID_PARAMETER_VALUE,
                                    locator="MEASUREMENTS parameter")
        if not bands:
            raise WCS1Exception("No measurements supplied",
                                WCS1Exception.INVALID_PARAMETER_VALUE,
                                locator="MEASUREMENTS parameter")
    elif "styles" in args and args["styles"]:
        # Use style bands.
        # Non-standard protocol extension.
        #
        # As we have correlated WCS and WMS service implementations,
        # we can accept a style from WMS, and return the bands used for it.
        styles = args["styles"].split(",")
        if len(styles) != 1:
            raise WCS1Exception("Multiple style parameters not supported")
        style = self.product.style_index.get(styles[0])
        if style:
            self.bands = style.needed_bands
        else:
            self.bands = self.product.wcs_default_bands
    else:
        self.bands = self.product.wcs_default_bands

    # Argument: EXCEPTIONS (optional - defaults to XML)
    if "exceptions" in args and args["exceptions"] != "application/vnd.ogc.se_xml":
        # BUG FIX: the format string was missing its %s placeholder, so the
        # % operator itself raised TypeError instead of this WCS1Exception.
        raise WCS1Exception("Unsupported exception format: %s" % args["exceptions"],
                            WCS1Exception.INVALID_PARAMETER_VALUE,
                            locator="EXCEPTIONS parameter")

    # Argument: INTERPOLATION (optional only nearest-neighbour currently supported.)
    # If 'none' is supported in future, validation of width/height/res will need to change.
    if "interpolation" in args and args["interpolation"] != "nearest neighbor":
        # BUG FIX: missing %s placeholder, as above.
        raise WCS1Exception("Unsupported interpolation method: %s" % args["interpolation"],
                            WCS1Exception.INVALID_PARAMETER_VALUE,
                            locator="INTERPOLATION parameter")

    # Arguments: exactly one of WIDTH/HEIGHT or RESX/RESY is required.
    if "width" in args:
        if "height" not in args:
            raise WCS1Exception(
                "WIDTH parameter supplied without HEIGHT parameter",
                WCS1Exception.MISSING_PARAMETER_VALUE,
                locator="WIDTH/HEIGHT parameters")
        if "resx" in args or "resy" in args:
            raise WCS1Exception(
                "Specify WIDTH/HEIGHT parameters OR RESX/RESY parameters - not both",
                WCS1Exception.MISSING_PARAMETER_VALUE,
                locator="RESX/RESY/WIDTH/HEIGHT parameters")
        try:
            self.height = int(args["height"])
            if self.height < 1:
                raise ValueError()
        except ValueError:
            raise WCS1Exception(
                "HEIGHT parameter must be a positive integer",
                WCS1Exception.INVALID_PARAMETER_VALUE,
                locator="HEIGHT parameter")
        try:
            self.width = int(args["width"])
            if self.width < 1:
                raise ValueError()
        except ValueError:
            raise WCS1Exception(
                "WIDTH parameter must be a positive integer",
                WCS1Exception.INVALID_PARAMETER_VALUE,
                locator="WIDTH parameter")
        # Derive resolution from the bbox and the pixel dimensions.
        self.resx = (self.maxx - self.minx) / self.width
        self.resy = (self.maxy - self.miny) / self.height
    elif "resx" in args:
        if "resy" not in args:
            raise WCS1Exception(
                "RESX parameter supplied without RESY parameter",
                WCS1Exception.MISSING_PARAMETER_VALUE,
                locator="RESX/RESY parameters")
        if "height" in args:
            raise WCS1Exception(
                "Specify WIDTH/HEIGHT parameters OR RESX/RESY parameters - not both",
                WCS1Exception.MISSING_PARAMETER_VALUE,
                locator="RESX/RESY/WIDTH/HEIGHT parameters")
        try:
            self.resx = float(args["resx"])
            if self.resx <= 0.0:
                raise ValueError(0)
        except ValueError:
            raise WCS1Exception("RESX parameter must be a positive number",
                                WCS1Exception.INVALID_PARAMETER_VALUE,
                                locator="RESX parameter")
        try:
            self.resy = float(args["resy"])
            if self.resy <= 0.0:
                raise ValueError(0)
        except ValueError:
            raise WCS1Exception("RESY parameter must be a positive number",
                                WCS1Exception.INVALID_PARAMETER_VALUE,
                                locator="RESY parameter")
        # Derive pixel dimensions from the bbox and resolution,
        # rounding to the nearest integer.
        self.width = (self.maxx - self.minx) / self.resx
        self.height = (self.maxy - self.miny) / self.resy
        self.width = int(self.width + 0.5)
        self.height = int(self.height + 0.5)
    elif "height" in args:
        raise WCS1Exception(
            "HEIGHT parameter supplied without WIDTH parameter",
            WCS1Exception.MISSING_PARAMETER_VALUE,
            locator="WIDTH/HEIGHT parameters")
    elif "resy" in args:
        raise WCS1Exception(
            "RESY parameter supplied without RESX parameter",
            WCS1Exception.MISSING_PARAMETER_VALUE,
            locator="RESX/RESY parameters")
    else:
        raise WCS1Exception(
            "You must specify either the WIDTH/HEIGHT parameters or RESX/RESY",
            WCS1Exception.MISSING_PARAMETER_VALUE,
            locator="RESX/RESY/WIDTH/HEIGHT parameters")

    # Derived request extent polygon and output geobox.
    self.extent = geometry.polygon([(self.minx, self.miny),
                                    (self.minx, self.maxy),
                                    (self.maxx, self.maxy),
                                    (self.maxx, self.miny),
                                    (self.minx, self.miny)],
                                   self.request_crs)
    # Affine maps pixel (col, row) to CRS coordinates; the y scale is
    # negative because row 0 sits at the top (maxy) of the bbox.
    xscale = (self.maxx - self.minx) / self.width
    yscale = (self.miny - self.maxy) / self.height
    trans_aff = Affine.translation(self.minx, self.maxy)
    scale_aff = Affine.scale(xscale, yscale)
    self.affine = trans_aff * scale_aff
    self.geobox = geometry.GeoBox(self.width, self.height, self.affine,
                                  self.request_crs)
def get_crsids(svc=None):
    """Return the identifiers of all published CRSs.

    svc: optional service configuration object; when falsy, the global
    service configuration is looked up via get_service_cfg().
    """
    cfg = svc or get_service_cfg()
    return cfg.published_CRSs.keys()
def activated(self):
    """Report whether this service is switched on in the service configuration.

    Reads the attribute named by ``self.service`` from the global service
    configuration object.
    """
    return getattr(get_service_cfg(), self.service)
def create_multiprod_range_entry(dc, product, crses):
    """Insert or refresh the wms.multiproduct_ranges row for a multi-product layer.

    Creates a placeholder row if none exists, then updates its lat/lon extents,
    its sorted list of dataset dates, and its per-CRS bounding boxes, all
    derived from the member products' datasets in the datacube index.

    dc:      Datacube instance; used only to obtain a raw SQL connection.
    product: either a dict with "products" (list of ODC products) and "name"
             keys, or an object exposing .products and .name.
    crses:   not referenced in this body — the CRS set is re-derived from the
             service config via get_crses(svc) below.
             NOTE(review): confirm against callers whether this parameter is dead.
    """
    conn = get_sqlconn(dc)
    # NOTE(review): there is no rollback on error — an exception mid-way leaves
    # the transaction open until the connection is closed; confirm intended.
    txn = conn.begin()
    # Accept both dict-style and object-style layer definitions.
    if isinstance(product, dict):
        prodids = [p.id for p in product["products"]]
        wms_name = product["name"]
    else:
        prodids = [p.id for p in product.products]
        wms_name = product.name

    # Attempt to insert row
    # Placeholder extents and empty JSON dates/bboxes; ON CONFLICT leaves an
    # existing row untouched so the UPDATEs below always have a target.
    conn.execute("""
        INSERT INTO wms.multiproduct_ranges
        (wms_product_name,lat_min,lat_max,lon_min,lon_max,dates,bboxes)
        VALUES
        (%(p_id)s, 0, 0, 0, 0, %(empty)s, %(empty)s)
        ON CONFLICT (wms_product_name) DO NOTHING
        """,
        {"p_id": wms_name, "empty": Json("")})

    # Update extents
    # wms_get_min/wms_get_max are database-side functions aggregating over the
    # member product ids.
    conn.execute("""
        UPDATE wms.multiproduct_ranges
        SET (lat_min,lat_max,lon_min,lon_max) =
        (wms_get_min(%(p_prodids)s, 'lat'), wms_get_max(%(p_prodids)s, 'lat'), wms_get_min(%(p_prodids)s, 'lon'), wms_get_max(%(p_prodids)s, 'lon'))
        WHERE wms_product_name=%(p_id)s
        """,
        {"p_id": wms_name, "p_prodids": prodids})

    # Create sorted list of dates
    # Distinct centre dates of all non-archived datasets of the member
    # products, stored as a JSONB array.
    conn.execute("""
        WITH sorted
        AS (SELECT to_jsonb(array_agg(dates.d)) AS dates
            FROM (SELECT DISTINCT to_date(metadata::json->'extent'->>'center_dt', 'YYYY-MM-DD') AS d
                  FROM agdc.dataset
                  WHERE dataset_type_ref = any (%(p_prodids)s)
                  AND archived IS NULL
                  ORDER BY d) dates)
        UPDATE wms.multiproduct_ranges
        SET dates=sorted.dates
        FROM sorted
        WHERE wms_product_name=%(p_id)s
        """,
        {"p_id": wms_name, "p_prodids": prodids})

    # calculate bounding boxes
    # Read back the extents just written; row order is
    # (lat_min, lat_max, lon_min, lon_max).
    results = list(conn.execute("""
        SELECT lat_min,lat_max,lon_min,lon_max
        FROM wms.multiproduct_ranges
        WHERE wms_product_name=%(p_id)s
        """,
        {"p_id": wms_name}))
    r = results[0]
    epsg4326 = datacube.utils.geometry.CRS("EPSG:4326")
    # box(left, bottom, right, top, crs) -> lon_min, lat_min, lon_max, lat_max.
    box = datacube.utils.geometry.box(float(r[2]), float(r[0]), float(r[3]), float(r[1]), epsg4326)
    svc = get_service_cfg()
    # Project the EPSG:4326 box into every published CRS and store as JSONB.
    # Note the positional (not dict) parameter style on this statement.
    conn.execute("""
        UPDATE wms.multiproduct_ranges
        SET bboxes = %s::jsonb
        WHERE wms_product_name=%s
        """,
        Json({crsid: jsonise_bbox(box.to_crs(crs).boundingbox) for crsid, crs in get_crses(svc).items()}),
        wms_name)

    txn.commit()
    conn.close()
    return
def _get_geobox_xy(args, crs):
    """Parse the request's comma-separated bbox into (minx, miny, maxx, maxy).

    The published-CRS configuration's "vertical_coord_first" flag decides
    whether the bbox lists the vertical or the horizontal coordinate first.
    """
    coords = [float(v) for v in args['bbox'].split(',')]
    crs_cfg = get_service_cfg().published_CRSs[crs.crs_str]
    if crs_cfg["vertical_coord_first"]:
        miny, minx, maxy, maxx = coords
    else:
        minx, miny, maxx, maxy = coords
    return minx, miny, maxx, maxy
def determine_product_ranges(dc, dc_product, extractor):
    # pylint: disable=too-many-locals, too-many-branches, too-many-statements, protected-access
    """Scan all datasets of a product and compute its WMS range metadata.

    Returns a dict with overall lat/lon min/max, the sorted set of dataset
    (local) dates, and per-CRS bounding boxes; when `extractor` is given,
    the same structure is also computed per sub-product under "sub_products".

    dc:         Datacube instance used to find datasets.
    dc_product: ODC product whose datasets are scanned.
    extractor:  optional callable mapping a dataset to a sub-product key,
                or None for no sub-product breakdown.
    """
    start = datetime.now()
    print("Product: ", dc_product.name)
    # Accumulators for the overall lat/lon envelope.
    r = {
        "lat": {
            "min": None,
            "max": None
        },
        "lon": {
            "min": None,
            "max": None
        },
    }
    sub_r = {}          # per-sub-product accumulators, keyed by extractor(ds)
    time_set = set()    # distinct local dates across all datasets
    svc = get_service_cfg()
    print("OK, Let's do it")
    crsids = get_crsids(svc)
    # When use_default_extent is set, per-dataset extent unions are skipped
    # and a configured default polygon is used instead (see below).
    calculate_extent = not svc.use_default_extent

    extents = {crsid: None for crsid in crsids}  # accumulated union per CRS
    crses = get_crses(svc)
    ds_count = 0
    for ds in dc.find_datasets(product=dc_product.name):
        print("Processing a dataset", ds.id)
        loc_date = local_date(ds)
        time_set.add(loc_date)
        if calculate_extent or extractor is not None:
            if extractor is not None:
                path = extractor(ds)
                # Lazily initialise the accumulator for a new sub-product key.
                if path not in sub_r:
                    sub_r[path] = {
                        "lat": {
                            "min": None,
                            "max": None,
                        },
                        "lon": {
                            "min": None,
                            "max": None,
                        },
                        "time_set": set(),
                        "extents": {crsid: None for crsid in crsids}
                    }
                sub_r[path]["lat"]["min"] = accum_min(sub_r[path]["lat"]["min"], ds.metadata.lat.begin)
                sub_r[path]["lat"]["max"] = accum_max(sub_r[path]["lat"]["max"], ds.metadata.lat.end)
                sub_r[path]["lon"]["min"] = accum_min(sub_r[path]["lon"]["min"], ds.metadata.lon.begin)
                sub_r[path]["lon"]["max"] = accum_max(sub_r[path]["lon"]["max"], ds.metadata.lon.end)
            else:
                path = None

            # Overall envelope is always accumulated in this branch.
            r["lat"]["min"] = accum_min(r["lat"]["min"], ds.metadata.lat.begin)
            r["lat"]["max"] = accum_max(r["lat"]["max"], ds.metadata.lat.end)
            r["lon"]["min"] = accum_min(r["lon"]["min"], ds.metadata.lon.begin)
            r["lon"]["max"] = accum_max(r["lon"]["max"], ds.metadata.lon.end)

            if path is not None:
                sub_r[path]["time_set"].add(loc_date)

            # Union this dataset's (convex-hulled) extent into each CRS.
            for crsid in crsids:
                print("Working with CRS", crsid)
                crs = crses[crsid]
                ext = ds.extent
                if ext.crs != crs:
                    ext = ext.to_crs(crs)
                # Convex hull guards against invalid/self-intersecting
                # geometries breaking the union below.
                cvx_ext = ext.convex_hull
                if cvx_ext != ext:
                    print("INFO: Dataset", ds.id, "CRS", crsid, "extent is not convex.")
                if extents[crsid] is None:
                    extents[crsid] = cvx_ext
                else:
                    if not extents[crsid].is_valid:
                        print("WARNING: Extent Union for", ds.id, "CRS", crsid, "is not valid")
                    if not cvx_ext.is_valid:
                        print("WARNING: Extent for CRS", crsid, "is not valid")
                    union = extents[crsid].union(cvx_ext)
                    # protected access: a None _geom indicates the underlying
                    # geometry library failed the union; keep the old extent.
                    if union._geom is not None:
                        extents[crsid] = union
                    else:
                        print("WARNING: Dataset", ds.id, "CRS", crsid, "union topology exception, ignoring union")
                if path is not None:
                    if sub_r[path]["extents"][crsid] is None:
                        sub_r[path]["extents"][crsid] = cvx_ext
                    else:
                        sub_r[path]["extents"][crsid] = sub_r[path]["extents"][crsid].union(cvx_ext)
        ds_count += 1

    # Default extent usage
    # When extents were not calculated per-dataset, substitute the configured
    # default GeoJSON polygon, reprojected into each published CRS.
    if not calculate_extent and ds_count > 0:
        for crsid in crsids:
            crs = crses[crsid]
            default = datacube.utils.geometry.Geometry(DEFAULT_GEOJSON, crs=DEFAULT_GEOJSON_CRS)
            extents[crsid] = default.to_crs(crs)

    r["times"] = sorted(time_set)
    r["time_set"] = time_set
    # NOTE(review): if ds_count == 0 (or extent calc skipped with no datasets),
    # extents[crsid] is still None and .boundingbox will raise — presumably
    # products are only ranged when datasets exist; confirm.
    r["bboxes"] = {crsid: jsonise_bbox(extents[crsid].boundingbox) for crsid in crsids}
    print("LATS: ", r["lat"], " LONS: ", r["lon"])
    if extractor is not None:
        # Finalise each sub-product: sorted dates, JSON-able bboxes, and drop
        # the intermediate geometry objects.
        for path in sub_r.keys():
            sub_r[path]["times"] = sorted(sub_r[path]["time_set"])
            sub_r[path]["bboxes"] = {crsid: jsonise_bbox(sub_r[path]["extents"][crsid].boundingbox) for crsid in crsids}
            del sub_r[path]["extents"]
        r["sub_products"] = sub_r
    end = datetime.now()
    print("Scanned %d datasets in %d seconds" % (ds_count, (end - start).seconds))
    return r