def feature_info(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetFeatureInfoParameters(args)
    feature_json = {}

    geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j)

    # Prepare to extract feature info: shrink the geobox down to the query point
    if geobox_is_point(params.geobox):
        geo_point_geobox = params.geobox
    else:
        geo_point_geobox = datacube.utils.geometry.GeoBox.from_geopolygon(
            geo_point, params.geobox.resolution, crs=params.geobox.crs)
    stacker = DataStacker(params.product, geo_point_geobox, params.time)

    # --- Begin code section requiring datacube.
    service_cfg = get_service_cfg()
    with cube() as dc:
        datasets = stacker.datasets(dc.index, all_time=True, point=geo_point)
        pq_datasets = stacker.datasets(dc.index, mask=True, all_time=False,
                                       point=geo_point)

        # The data is a single point, so our indexes into it are always 0,0
        h_coord = service_cfg.published_CRSs[params.crsid]["horizontal_coord"]
        v_coord = service_cfg.published_CRSs[params.crsid]["vertical_coord"]
        s3_bucket = service_cfg.s3_bucket
        s3_url = service_cfg.s3_url
        isel_kwargs = {h_coord: 0, v_coord: 0}

        if datasets:
            # Group datasets by local date, resolving the timezone once from
            # the query point
            dataset_date_index = {}
            tz = None
            for ds in datasets:
                if tz is None:
                    crs_geo = geometry.CRS("EPSG:4326")
                    ptg = geo_point.to_crs(crs_geo)
                    tz = tz_for_coord(ptg.coords[0][0], ptg.coords[0][1])
                ld = local_date(ds, tz=tz)
                dataset_date_index.setdefault(ld, []).append(ds)

            # Load only the datasets whose local date matches the requested time
            available_dates = dataset_date_index.keys()
            ds_at_time = dataset_date_index.get(params.time, [])
            _LOG.info("%d datasets, %d at target date",
                      len(datasets), len(ds_at_time))

            if len(ds_at_time) > 0:
                pixel_ds = None
                data = stacker.data(
                    ds_at_time,
                    skip_corrections=True,
                    manual_merge=params.product.data_manual_merge,
                    fuse_func=params.product.fuse_func)

                # Extract coordinates in the request CRS
                data_x = getattr(data, h_coord)
                data_y = getattr(data, v_coord)
                x = data_x[isel_kwargs[h_coord]].item()
                y = data_y[isel_kwargs[v_coord]].item()
                pt = geometry.point(x, y, params.crs)

                if params.product.multi_product:
                    feature_json["source_product"] = "%s (%s)" % (
                        ds_at_time[0].type.name,
                        ds_at_time[0].metadata_doc["platform"]["code"])

                # Non-geographic coordinate systems must be projected onto a
                # geographic one (EPSG:4326) before lat/long can be reported
                crs_geo = geometry.CRS("EPSG:4326")
                ptg = pt.to_crs(crs_geo)

                # Capture lat/long coordinates
                feature_json["lon"], feature_json["lat"] = ptg.coords[0]

                # Extract the data pixel
                pixel_ds = data.isel(**isel_kwargs)

                # Get an accurate timestamp from the dataset
                feature_json["time"] = dataset_center_time(
                    ds_at_time[0]).strftime("%Y-%m-%d %H:%M:%S UTC")

                # Collect raw band values for the pixel, plus any bands
                # derived from the product's styles
                feature_json["bands"] = _make_band_dict(
                    params.product, pixel_ds, stacker.needed_bands())
                derived_band_dict = _make_derived_band_dict(
                    pixel_ds, params.product.style_index)
                if derived_band_dict:
                    feature_json["band_derived"] = derived_band_dict
                if callable(params.product.feature_info_include_custom):
                    additional_data = params.product.feature_info_include_custom(
                        feature_json["bands"])
                    feature_json.update(additional_data)

            # Accumulate pixel-quality flags for the requested date
            my_flags = 0
            for pqd in pq_datasets:
                # Compare local dates, matching the grouping above
                # (params.time is a date, not a datetime)
                idx_date = local_date(pqd, tz=tz)
                if idx_date == params.time:
                    pq_data = stacker.data([pqd], mask=True)
                    pq_pixel_ds = pq_data.isel(**isel_kwargs)
                    # PQ flags
                    m = params.product.pq_product.measurements[params.product.pq_band]
                    flags = pq_pixel_ds[params.product.pq_band].item()
                    if not flags & ~params.product.info_mask:
                        my_flags = my_flags | flags
                    else:
                        continue
                    feature_json["flags"] = {}
                    for mk, mv in m["flags_definition"].items():
                        if mk in params.product.ignore_flags_info:
                            continue
                        bits = mv["bits"]
                        values = mv["values"]
                        if not isinstance(bits, int):
                            continue
                        flag = 1 << bits
                        val = values['1'] if my_flags & flag else values['0']
                        feature_json["flags"][mk] = val

            feature_json["data_available_for_dates"] = [
                d.strftime("%Y-%m-%d") for d in sorted(available_dates)
            ]
            feature_json["data_links"] = sorted(
                get_s3_browser_uris(ds_at_time, s3_url, s3_bucket))
            if params.product.feature_info_include_utc_dates:
                feature_json["data_available_for_utc_dates"] = sorted(
                    d.center_time.strftime("%Y-%m-%d") for d in datasets)
    # --- End code section requiring datacube.

    result = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": feature_json
        }]
    }
    return json.dumps(result), 200, resp_headers(
        {"Content-Type": "application/json"})
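# For orientation: a minimal sketch of how a handler with this signature
# might be wired into a Flask app. The /wms route, the dispatch logic and
# the app object are illustrative assumptions, not part of this module;
# only feature_info() itself comes from the code above.
from flask import Flask, request

app = Flask(__name__)

@app.route("/wms")
def wms_endpoint():
    # WMS parameter names are case-insensitive, so normalise the keys
    args = {key.lower(): value for key, value in request.args.items()}
    if args.get("request", "").lower() == "getfeatureinfo":
        # feature_info() returns a (body, status, headers) tuple,
        # which Flask accepts directly as a response
        return feature_info(args)
    return "Unsupported request type", 400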
def feature_info(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetFeatureInfoParameters(args)

    # Prepare to extract feature info
    stacker = DataStacker(params.product, params.geobox, params.time)
    feature_json = {}

    # --- Begin code section requiring datacube.
    service_cfg = get_service_cfg()
    dc = get_cube()
    try:
        geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j)
        datasets = stacker.datasets(dc.index, all_time=True, point=geo_point)
        pq_datasets = stacker.datasets(dc.index, mask=True, all_time=False,
                                       point=geo_point)

        h_coord = service_cfg.published_CRSs[params.crsid]["horizontal_coord"]
        v_coord = service_cfg.published_CRSs[params.crsid]["vertical_coord"]
        isel_kwargs = {h_coord: [params.i], v_coord: [params.j]}

        if datasets:
            available_dates = set()
            drill = {}
            for d in datasets:
                idx_date = (d.center_time +
                            timedelta(hours=params.product.time_zone)).date()
                available_dates.add(idx_date)
                pixel_ds = None
                if idx_date == params.time and "lon" not in feature_json:
                    data = stacker.data([d], skip_corrections=True)

                    # Use the i,j image coordinates to extract the data pixel
                    # from the dataset, and convert to lat/long geographic
                    # coordinates
                    if service_cfg.published_CRSs[params.crsid]["geographic"]:
                        # Geographic coordinate systems (e.g. EPSG:4326/WGS-84)
                        # are already in lat/long
                        feature_json["lat"] = data.latitude[params.j].item()
                        feature_json["lon"] = data.longitude[params.i].item()
                        pixel_ds = data.isel(**isel_kwargs)
                    else:
                        # Non-geographic coordinate systems must be projected
                        # onto a geographic one (EPSG:4326) before lat/long
                        # can be reported.
                        # Extract coordinates in the request CRS
                        data_x = getattr(data, h_coord)
                        data_y = getattr(data, v_coord)
                        x = data_x[params.i].item()
                        y = data_y[params.j].item()
                        pt = geometry.point(x, y, params.crs)

                        # Project to EPSG:4326
                        crs_geo = geometry.CRS("EPSG:4326")
                        ptg = pt.to_crs(crs_geo)

                        # Capture lat/long coordinates
                        feature_json["lon"], feature_json["lat"] = ptg.coords[0]

                        # Extract the data pixel
                        pixel_ds = data.isel(**isel_kwargs)

                    # Get an accurate timestamp from the dataset
                    feature_json["time"] = d.center_time.strftime(
                        "%Y-%m-%d %H:%M:%S UTC")

                    # Collect raw band values for the pixel
                    feature_json["bands"] = {}
                    for band in stacker.needed_bands():
                        ret_val = band_val = pixel_ds[band].item()
                        if band_val == pixel_ds[band].nodata:
                            feature_json["bands"][band] = "n/a"
                        else:
                            if hasattr(pixel_ds[band], 'flags_definition'):
                                flag_def = pixel_ds[band].flags_definition
                                flag_dict = mask_to_dict(flag_def, band_val)
                                ret_val = [
                                    flag_def[k]['description']
                                    for k in filter(flag_dict.get, flag_dict)
                                ]
                            feature_json["bands"][band] = ret_val

                    # Evaluate any style index functions against the pixel
                    for k, v in filter(
                            lambda kv: hasattr(kv[1], 'index_function'),
                            params.product.style_index.items()):
                        if v.index_function is None:
                            continue
                        vals_nodata = [
                            pixel_ds[b] == pixel_ds[b].nodata
                            for b in v.needed_bands
                        ]
                        if any(vals_nodata):
                            continue
                        value = v.index_function(pixel_ds).item()
                        feature_json.setdefault("band_derived", {})[k] = value

                # Band drill: collect configured band values for every
                # available date, not just the requested one
                if params.product.band_drill:
                    if pixel_ds is None:
                        data = stacker.data([d], skip_corrections=True)
                        pixel_ds = data.isel(**isel_kwargs)
                    drill_section = {}
                    for band in params.product.band_drill:
                        band_val = pixel_ds[band].item()
                        if band_val == pixel_ds[band].nodata:
                            drill_section[band] = "n/a"
                        else:
                            drill_section[band] = band_val
                    drill[idx_date.strftime("%Y-%m-%d")] = drill_section
            if drill:
                feature_json["time_drill"] = drill
                feature_json["datasets_read"] = len(datasets)

            # Accumulate pixel-quality flags for the requested date
            my_flags = 0
            for pqd in pq_datasets:
                idx_date = (pqd.center_time +
                            timedelta(hours=params.product.time_zone)).date()
                if idx_date == params.time:
                    pq_data = stacker.data([pqd], mask=True)
                    pq_pixel_ds = pq_data.isel(**isel_kwargs)
                    # PQ flags
                    m = params.product.pq_product.measurements[params.product.pq_band]
                    flags = pq_pixel_ds[params.product.pq_band].item()
                    if not flags & ~params.product.info_mask:
                        my_flags = my_flags | flags
                    else:
                        continue
                    feature_json["flags"] = {}
                    for mk, mv in m["flags_definition"].items():
                        if mk in params.product.ignore_flags_info:
                            continue
                        bits = mv["bits"]
                        values = mv["values"]
                        if not isinstance(bits, int):
                            continue
                        flag = 1 << bits
                        val = values['1'] if my_flags & flag else values['0']
                        feature_json["flags"][mk] = val

            feature_json["data_available_for_dates"] = [
                d.strftime("%Y-%m-%d") for d in sorted(available_dates)
            ]
            feature_json["data_links"] = sorted(get_s3_browser_uris(datasets))
    finally:
        # Release the datacube handle on both success and error paths
        release_cube(dc)
    # --- End code section requiring datacube.

    result = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": feature_json
        }]
    }
    return json.dumps(result), 200, resp_headers(
        {"Content-Type": "application/json"})
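# Both variants of feature_info() serialise the same envelope: a GeoJSON
# FeatureCollection holding a single Feature whose "properties" carry the
# values collected above. A representative payload (all values invented
# for illustration; the keys mirror those assembled in the code):
#
# {
#     "type": "FeatureCollection",
#     "features": [{
#         "type": "Feature",
#         "properties": {
#             "lat": -35.31,
#             "lon": 149.12,
#             "time": "2018-01-12 23:50:52 UTC",
#             "bands": {"red": 1180, "nir": 2874},
#             "flags": {"cloud": "no_cloud"},
#             "data_available_for_dates": ["2018-01-12", "2018-01-28"],
#             "data_links": []
#         }
#     }]
# }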