def _bounding_pts(minx, miny, maxx, maxy, width, height, src_crs, dst_crs=None):
    # pylint: disable=too-many-locals
    """Return the axis-aligned bounds of the four corner points of the given
    bounding box, optionally after reprojecting them from src_crs to dst_crs.

    width and height are accepted for signature compatibility but are not
    used in this computation.

    Returns a (minx, miny, maxx, maxy) tuple.
    """
    corners = [
        geometry.point(minx, maxy, src_crs),
        geometry.point(minx, miny, src_crs),
        geometry.point(maxx, maxy, src_crs),
        geometry.point(maxx, miny, src_crs),
    ]
    if dst_crs is not None:
        corners = [c.to_crs(dst_crs) for c in corners]
    xs = [c.points[0][0] for c in corners]
    ys = [c.points[0][1] for c in corners]
    # miny-maxy for negative scale factor and maxy in the translation,
    # includes inversion of Y axis.
    return min(xs), min(ys), max(xs), max(ys)
def zoom_factor(args, crs):
    """Determine the geographic "zoom factor" for the request.

    Larger zoom factor means deeper zoom; smaller zoom factor means a
    larger area.
    """
    # Extract request bbox and dimensions.
    width = int(args['width'])
    height = int(args['height'])
    minx, miny, maxx, maxy = map(float, args['bbox'].split(','))
    corners = [
        geometry.point(minx, maxy, crs),
        geometry.point(minx, miny, crs),
        geometry.point(maxx, maxy, crs),
        geometry.point(maxx, miny, crs),
    ]
    # Project to a geographic coordinate system.
    # This is why we can't just use the regular geobox: the scale needs to be
    # "standardised" in some sense, not dependent on the CRS of the request.
    geo_crs = geometry.CRS("EPSG:4326")
    projected = [pt.to_crs(geo_crs) for pt in corners]
    xs = [pt.points[0][0] for pt in projected]
    ys = [pt.points[0][1] for pt in projected]
    minx, maxx = min(xs), max(xs)
    miny, maxy = min(ys), max(ys)
    # Create geobox affine transformation (N.B. Don't need an actual Geobox)
    affine = Affine.translation(minx, miny) * Affine.scale(
        (maxx - minx) / width, (maxy - miny) / height)
    # Zoom factor is the reciprocal of the square root of the transform
    # determinant (the determinant is the x scale factor multiplied by
    # the y scale factor).
    return 1.0 / math.sqrt(affine.determinant)
def test_props():
    """Basic geometry properties: area/length/centroid of a box, polygon
    envelope, equality, BoundingBox dimensions, and point JSON/truthiness."""
    crs = epsg4326
    box1 = geometry.box(10, 10, 30, 30, crs=crs)
    assert box1
    assert box1.is_valid
    assert not box1.is_empty
    assert box1.area == 400.0
    assert box1.boundary.length == 80.0
    assert box1.centroid == geometry.point(20, 20, crs)
    triangle = geometry.polygon([(10, 20), (20, 20), (20, 10), (10, 20)], crs=crs)
    assert triangle.envelope == geometry.BoundingBox(10, 10, 20, 20)
    # Iterating a polygon yields its rings; the first is the outer ring.
    outer = next(iter(box1))
    assert outer.length == 80.0
    box1copy = geometry.box(10, 10, 30, 30, crs=crs)
    assert box1 == box1copy
    assert box1.convex_hull == box1copy  # NOTE: this might fail because of point order
    box2 = geometry.box(20, 10, 40, 30, crs=crs)
    assert box1 != box2
    bbox = geometry.BoundingBox(1, 0, 10, 13)
    assert bbox.width == 9
    assert bbox.height == 13
    pt = geometry.point(3, 4, crs)
    assert pt.json['coordinates'] == (3.0, 4.0)
    assert 'Point' in str(pt)
    assert bool(pt) is True
    assert pt.__nonzero__() is True
def geobox_info(extent, valid_data=None):
    """Build an eo-style spatial metadata document for a geobox extent.

    :param extent: geometry with a ``boundingbox`` and a ``crs`` attribute
        describing the full image footprint.
    :param valid_data: optional geometry restricting the footprint to valid
        pixels; when given, its bounding box is used for the lat/long corner
        coordinates and its ``__geo_interface__`` is embedded in the document.
    :return: nested dict with 'extent' (lat/long corners) and 'grid_spatial'
        (native-CRS reference points) sections.
    """
    image_bounds = extent.boundingbox
    data_bounds = valid_data.boundingbox if valid_data else image_bounds
    # Hoisted: the original constructed geometry.CRS('EPSG:4326') once per corner.
    geo_crs = geometry.CRS('EPSG:4326')

    def _lonlat(x, y):
        # Project one corner of the data bounds to lat/long.
        pt = geometry.point(x, y, crs=extent.crs).to_crs(geo_crs)
        lon, lat = pt.points[0]
        return {'lon': lon, 'lat': lat}

    doc = {
        'extent': {
            'coord': {
                'ul': _lonlat(data_bounds.left, data_bounds.top),
                'ur': _lonlat(data_bounds.right, data_bounds.top),
                'lr': _lonlat(data_bounds.right, data_bounds.bottom),
                'll': _lonlat(data_bounds.left, data_bounds.bottom),
            }
        },
        'grid_spatial': {
            'projection': {
                'spatial_reference': str(extent.crs),
                'geo_ref_points': {
                    'ul': {'x': image_bounds.left, 'y': image_bounds.top},
                    'ur': {'x': image_bounds.right, 'y': image_bounds.top},
                    'll': {'x': image_bounds.left, 'y': image_bounds.bottom},
                    'lr': {'x': image_bounds.right, 'y': image_bounds.bottom},
                }
            }
        }
    }
    if valid_data:
        doc['grid_spatial']['projection'][
            'valid_data'] = valid_data.__geo_interface__
    return doc
def test_common_crs():
    """common_crs: empty input -> None; uniform CRS -> that CRS; mixed -> raises."""
    assert common_crs([]) is None
    same_crs_geoms = [
        geometry.point(0, 0, epsg4326),
        geometry.line([(0, 0), (1, 1)], epsg4326),
    ]
    assert common_crs(same_crs_geoms) is epsg4326
    mixed_crs_geoms = [
        geometry.point(0, 0, epsg4326),
        geometry.line([(0, 0), (1, 1)], epsg3857),
    ]
    with pytest.raises(CRSMismatchError):
        common_crs(mixed_crs_geoms)
def bbox_crs(self, crs):
    """Return this bounding box reprojected into `crs`.

    :return: (lower_x, lower_y, upper_x, upper_y) tuple of floats, obtained
        by projecting the lower and upper corners independently.
    """
    from datacube.utils import geometry
    lower = geometry.point(*self.lower_corner, crs=self.native_crs).to_crs(crs)
    upper = geometry.point(*self.upper_corner, crs=self.native_crs).to_crs(crs)
    (lx, ly) = lower.coords[0]
    (ux, uy) = upper.coords[0]
    return (float(lx), float(ly), float(ux), float(uy))
def test_point_transformer():
    """CRS.transformer_to_crs: agreement with point.to_crs, shape handling,
    round-trip accuracy, and NaN propagation."""
    from datacube.utils.geometry import point
    tr = epsg3857.transformer_to_crs(epsg4326)
    tr_back = epsg4326.transformer_to_crs(epsg3857)
    pts = [(0, 0), (0, 1), (1, 2), (10, 11)]
    x, y = np.vstack(pts).astype('float64').T
    # Expected values come from the per-point to_crs path.
    pts_expect = [
        point(*pt, epsg3857).to_crs(epsg4326).points[0] for pt in pts
    ]
    x_expect = [pt[0] for pt in pts_expect]
    y_expect = [pt[1] for pt in pts_expect]
    x_, y_ = tr(x, y)
    assert x_.shape == x.shape
    np.testing.assert_array_almost_equal(x_, x_expect)
    np.testing.assert_array_almost_equal(y_, y_expect)
    # Input array shape is preserved (2x2 here), and the reverse
    # transformer round-trips back to the originals.
    x, y = (a.reshape(2, 2) for a in (x, y))
    x_, y_ = tr(x, y)
    assert x_.shape == x.shape
    xb, yb = tr_back(x_, y_)
    np.testing.assert_array_almost_equal(x, xb)
    np.testing.assert_array_almost_equal(y, yb)
    # check nans: NaN in either input coordinate yields NaN in both outputs
    x_, y_ = tr(np.asarray([np.nan, 0, np.nan]),
                np.asarray([0, np.nan, np.nan]))
    assert np.isnan(x_).all()
    assert np.isnan(y_).all()
def test_unary_union():
    """unary_union over boxes: disjoint vs overlapping merges, empty input,
    and rejection of non-polygon input."""
    box1 = geometry.box(10, 10, 30, 30, crs=epsg4326)
    box2 = geometry.box(20, 10, 40, 30, crs=epsg4326)
    box3 = geometry.box(30, 10, 50, 30, crs=epsg4326)
    box4 = geometry.box(40, 10, 60, 30, crs=epsg4326)
    union0 = geometry.unary_union([box1])
    assert union0 == box1
    # Disjoint boxes give a MultiPolygon whose area is the plain sum.
    union1 = geometry.unary_union([box1, box4])
    assert union1.type == 'MultiPolygon'
    assert union1.area == 2.0 * box1.area
    # Overlapping boxes merge into a single Polygon; overlap counted once.
    union2 = geometry.unary_union([box1, box2])
    assert union2.type == 'Polygon'
    assert union2.area == 1.5 * box1.area
    union3 = geometry.unary_union([box1, box2, box3, box4])
    assert union3.type == 'Polygon'
    assert union3.area == 2.5 * box1.area
    # Union of a prior union result with more boxes also works.
    union4 = geometry.unary_union([union1, box2, box3])
    assert union4.type == 'Polygon'
    assert union4.area == 2.5 * box1.area
    # Empty input yields None.
    assert geometry.unary_union([]) is None
    # Points are not unionable -> ValueError.
    with pytest.raises(ValueError):
        pt = geometry.point(6, 7, epsg4326)
        geometry.unary_union([pt, pt])
def img_coords_to_geopoint(geobox, i, j):
    """Convert image pixel indices (i, j) into a point in the geobox CRS.

    The horizontal/vertical coordinate names are looked up from the
    published-CRS configuration for the geobox's CRS.
    """
    cfg = get_config()
    crs_cfg = cfg.published_CRSs[geobox.crs.crs_str]
    h_values = geobox.coordinates[crs_cfg["horizontal_coord"]].values
    v_values = geobox.coordinates[crs_cfg["vertical_coord"]].values
    return geometry.point(h_values[int(i)], v_values[int(j)], geobox.crs)
def test_props():
    """Geometry properties: box/polygon measures, BoundingBox points and
    affine transforms, point JSON, string-CRS coercion, and bad-input errors."""
    crs = epsg4326
    box1 = geometry.box(10, 10, 30, 30, crs=crs)
    assert box1
    assert box1.is_valid
    assert not box1.is_empty
    assert box1.area == 400.0
    assert box1.boundary.length == 80.0
    assert box1.centroid == geometry.point(20, 20, crs)
    triangle = geometry.polygon([(10, 20), (20, 20), (20, 10), (10, 20)], crs=crs)
    assert triangle.boundingbox == geometry.BoundingBox(10, 10, 20, 20)
    assert triangle.envelope.contains(triangle)
    assert box1.length == 80.0
    box1copy = geometry.box(10, 10, 30, 30, crs=crs)
    assert box1 == box1copy
    assert box1.convex_hull == box1copy  # NOTE: this might fail because of point order
    box2 = geometry.box(20, 10, 40, 30, crs=crs)
    assert box1 != box2
    bbox = geometry.BoundingBox(1, 0, 10, 13)
    assert bbox.width == 9
    assert bbox.height == 13
    assert bbox.points == [(1, 0), (1, 13), (10, 0), (10, 13)]
    # Identity transform is a no-op; translation shifts all four bounds.
    assert bbox.transform(Affine.identity()) == bbox
    assert bbox.transform(Affine.translation(1, 2)) == geometry.BoundingBox(
        2, 2, 11, 15)
    pt = geometry.point(3, 4, crs)
    assert pt.json['coordinates'] == (3.0, 4.0)
    assert 'Point' in str(pt)
    assert bool(pt) is True
    assert pt.__nonzero__() is True
    # check "CRS as string is converted to class automatically"
    assert isinstance(geometry.point(3, 4, 'epsg:3857').crs, geometry.CRS)
    # constructor with bad input should raise ValueError
    with pytest.raises(ValueError):
        geometry.Geometry(object())
def img_coords_to_geopoint(geobox, i, j):
    """Convert image pixel indices (i, j) into a point in the geobox CRS.

    Coordinate names come from the published-CRS config for the geobox's
    CRS, defaulting to longitude/latitude when not configured.
    """
    crs_cfg = service_cfg["published_CRSs"][geobox.crs.crs_str]
    h_coord = crs_cfg.get("horizontal_coord", "longitude")
    v_coord = crs_cfg.get("vertical_coord", "latitude")
    x = geobox.coordinates[h_coord].values[int(i)]
    y = geobox.coordinates[v_coord].values[int(j)]
    return geometry.point(x, y, geobox.crs)
def test_multigeom():
    """multigeom: collection type inferred from parts, CRS propagation,
    input-order preservation, and mixed-type/mixed-CRS errors."""
    p1, p2 = (0, 0), (1, 2)
    p3, p4 = (3, 4), (5, 6)
    b1 = geometry.box(*p1, *p2, epsg4326)
    b2 = geometry.box(*p3, *p4, epsg4326)
    bb = multigeom([b1, b2])
    assert bb.type == 'MultiPolygon'
    assert bb.crs is b1.crs
    assert len(list(bb)) == 2
    g1 = geometry.line([p1, p2], None)
    g2 = geometry.line([p3, p4], None)
    # Accepts any iterable; duplicate members are kept.
    gg = multigeom(iter([g1, g2, g1]))
    assert gg.type == 'MultiLineString'
    assert gg.crs is g1.crs
    assert len(list(gg)) == 3
    g1 = geometry.point(*p1, epsg3857)
    g2 = geometry.point(*p2, epsg3857)
    g3 = geometry.point(*p3, epsg3857)
    gg = multigeom(iter([g1, g2, g3]))
    assert gg.type == 'MultiPoint'
    assert gg.crs is g1.crs
    assert len(list(gg)) == 3
    # Input order is preserved in the resulting collection.
    assert list(gg)[0] == g1
    assert list(gg)[1] == g2
    assert list(gg)[2] == g3
    # can't mix types
    with pytest.raises(ValueError):
        multigeom([geometry.line([p1, p2], None), geometry.point(*p1, None)])
    # can't mix CRSs
    with pytest.raises(CRSMismatchError):
        multigeom([
            geometry.line([p1, p2], epsg4326),
            geometry.line([p3, p4], epsg3857)
        ])
    # only some types are supported on input
    with pytest.raises(ValueError):
        multigeom([gg])
def solar_correct_data(data, dataset):
    """Apply solar angle correction to the data for a dataset.

    Divides the data by the cosine of the solar zenith angle computed at
    the dataset's footprint centre and centre time. See for example
    http://gsp.humboldt.edu/olm_2015/Courses/GSP_216_Online/lesson4-1/radiometric.html
    """
    # Centre of the dataset footprint in its native CRS.
    centre_x = (dataset.bounds.right + dataset.bounds.left) / 2.0
    centre_y = (dataset.bounds.top + dataset.bounds.bottom) / 2.0
    # Reproject the centre point to lat/long.
    geo_pt = geometry.point(centre_x, centre_y, dataset.crs).to_crs(
        geometry.CRS("EPSG:4326"))
    lon, lat = geo_pt.coords[0]
    when = dataset.center_time.astimezone(utc)
    csz = cosine_of_solar_zenith(lat, lon, when)
    return data / csz
def test_pix_transform():
    """native_pix_transform: CRS-changing, identity, and scale-only cases."""
    # Project a (lon, lat) point into EPSG:3577 and snap it to a 10-unit grid
    # to use as the geobox origin.
    pt = tuple([
        int(x / 10) * 10
        for x in geometry.point(145, -35, epsg4326).to_crs(epsg3577).coords[0]
    ])
    A = mkA(scale=(20, -20), translation=pt)
    src = geometry.GeoBox(1024, 512, A, epsg3577)
    dst = geometry.GeoBox.from_geopolygon(src.geographic_extent,
                                          (0.0001, -0.0001))
    tr = native_pix_transform(src, dst)
    pts_src = [(0, 0), (10, 20), (300, 200)]
    pts_dst = tr(pts_src)
    pts_src_ = tr.back(pts_dst)
    # Forward then back should round-trip (to numerical precision).
    np.testing.assert_almost_equal(pts_src, pts_src_)
    # A CRS change is not expressible as a linear (affine) pixel transform.
    assert tr.linear is None
    # check identity transform
    tr = native_pix_transform(src, src)
    pts_src = [(0, 0), (10, 20), (300, 200)]
    pts_dst = tr(pts_src)
    pts_src_ = tr.back(pts_dst)
    np.testing.assert_almost_equal(pts_src, pts_src_)
    np.testing.assert_almost_equal(pts_src, pts_dst)
    assert tr.linear is not None
    assert tr.back.linear is not None
    assert tr.back.back is tr
    # check scale only change
    tr = native_pix_transform(src, scaled_down_geobox(src, 2))
    pts_dst = tr(pts_src)
    pts_src_ = tr.back(pts_dst)
    assert tr.linear is not None
    assert tr.back.linear is not None
    assert tr.back.back is tr
    # Halving the resolution halves the pixel coordinates.
    np.testing.assert_almost_equal(pts_dst,
                                   [(x / 2, y / 2) for (x, y) in pts_src])
    np.testing.assert_almost_equal(pts_src, pts_src_)
def test_props():
    """Basic geometry properties: box measures, polygon envelope, equality."""
    # Build the CRS once instead of repeating geometry.CRS('EPSG:4326') inline.
    crs = geometry.CRS('EPSG:4326')
    box1 = geometry.box(10, 10, 30, 30, crs=crs)
    assert box1
    assert box1.is_valid
    assert not box1.is_empty
    assert box1.area == 400.0
    assert box1.boundary.length == 80.0
    assert box1.centroid == geometry.point(20, 20, crs)
    triangle = geometry.polygon([(10, 20), (20, 20), (20, 10), (10, 20)],
                                crs=crs)
    assert triangle.envelope == geometry.BoundingBox(10, 10, 20, 20)
    # First (outer) ring of the box polygon.
    outer = next(iter(box1))
    assert outer.length == 80.0
    box1copy = geometry.box(10, 10, 30, 30, crs=crs)
    assert box1 == box1copy
    assert box1.convex_hull == box1copy  # NOTE: this might fail because of point order
    box2 = geometry.box(20, 10, 40, 30, crs=crs)
    assert box1 != box2
def feature_info(args):
    """Handle a WMS GetFeatureInfo request.

    Looks up the datasets covering the requested pixel, extracts per-band
    values (and PQ flag descriptions) at that pixel, and returns a GeoJSON
    FeatureCollection as (body, status, headers).
    """
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetFeatureInfoParameters(args)
    # Prepare to extract feature info
    stacker = DataStacker(params.product, params.geobox, params.time)
    feature_json = {}
    # --- Begin code section requiring datacube.
    service_cfg = get_service_cfg()
    dc = get_cube()
    try:
        geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j)
        datasets = stacker.datasets(dc.index, all_time=True, point=geo_point)
        pq_datasets = stacker.datasets(dc.index, mask=True, all_time=False,
                                       point=geo_point)
        h_coord = service_cfg.published_CRSs[params.crsid]["horizontal_coord"]
        v_coord = service_cfg.published_CRSs[params.crsid]["vertical_coord"]
        isel_kwargs = {h_coord: [params.i], v_coord: [params.j]}
        if not datasets:
            pass
        else:
            available_dates = set()
            drill = {}
            for d in datasets:
                # Dataset date in the product's configured local timezone.
                idx_date = (d.center_time +
                            timedelta(hours=params.product.time_zone)).date()
                available_dates.add(idx_date)
                pixel_ds = None
                # Only extract pixel data once, from the first dataset
                # matching the requested date ("lon" marks it as done).
                if idx_date == params.time and "lon" not in feature_json:
                    data = stacker.data([d], skip_corrections=True)
                    # Use i,j image coordinates to extract data pixel from dataset, and
                    # convert to lat/long geographic coordinates
                    if service_cfg.published_CRSs[params.crsid]["geographic"]:
                        # Geographic coordinate systems (e.g. EPSG:4326/WGS-84) are already in lat/long
                        feature_json["lat"] = data.latitude[params.j].item()
                        feature_json["lon"] = data.longitude[params.i].item()
                        pixel_ds = data.isel(**isel_kwargs)
                    else:
                        # Non-geographic coordinate systems need to be projected onto a geographic
                        # coordinate system. Why not use EPSG:4326?
                        # Extract coordinates in CRS
                        data_x = getattr(data, h_coord)
                        data_y = getattr(data, v_coord)
                        x = data_x[params.i].item()
                        y = data_y[params.j].item()
                        pt = geometry.point(x, y, params.crs)
                        # Project to EPSG:4326
                        crs_geo = geometry.CRS("EPSG:4326")
                        ptg = pt.to_crs(crs_geo)
                        # Capture lat/long coordinates
                        feature_json["lon"], feature_json["lat"] = ptg.coords[
                            0]
                        # Extract data pixel
                        pixel_ds = data.isel(**isel_kwargs)
                    # Get accurate timestamp from dataset
                    feature_json["time"] = d.center_time.strftime(
                        "%Y-%m-%d %H:%M:%S UTC")
                    feature_json["bands"] = {}
                    # Collect raw band values for pixel; flag bands are
                    # expanded into lists of set-flag descriptions.
                    for band in stacker.needed_bands():
                        ret_val = band_val = pixel_ds[band].item()
                        if band_val == pixel_ds[band].nodata:
                            feature_json["bands"][band] = "n/a"
                        else:
                            if hasattr(pixel_ds[band], 'flags_definition'):
                                flag_def = pixel_ds[band].flags_definition
                                flag_dict = mask_to_dict(flag_def, band_val)
                                ret_val = [
                                    flag_def[k]['description']
                                    for k in filter(flag_dict.get, flag_dict)
                                ]
                            feature_json["bands"][band] = ret_val
                    # Evaluate style index functions for this pixel, skipping
                    # styles whose needed bands contain nodata.
                    for k, v in filter(
                            lambda kv: hasattr(kv[1], 'index_function'),
                            params.product.style_index.items()):
                        if v.index_function is None:
                            continue
                        vals_nodata = [
                            pixel_ds[b] == pixel_ds[b].nodata
                            for b in v.needed_bands
                        ]
                        if any(vals_nodata):
                            continue
                        value = v.index_function(pixel_ds).item()
                        # EAFP: create the sub-dict on first use.
                        try:
                            feature_json["band_derived"][k] = value
                        except KeyError:
                            feature_json["band_derived"] = {}
                            feature_json["band_derived"][k] = value
                # Time-drill bands are collected for EVERY dataset date,
                # loading data lazily if not already loaded above.
                if params.product.band_drill:
                    if pixel_ds is None:
                        data = stacker.data([d], skip_corrections=True)
                        pixel_ds = data.isel(**isel_kwargs)
                    drill_section = {}
                    for band in params.product.band_drill:
                        band_val = pixel_ds[band].item()
                        if band_val == pixel_ds[band].nodata:
                            drill_section[band] = "n/a"
                        else:
                            drill_section[band] = pixel_ds[band].item()
                    drill[idx_date.strftime("%Y-%m-%d")] = drill_section
            if drill:
                feature_json["time_drill"] = drill
                feature_json["datasets_read"] = len(datasets)
            my_flags = 0
            pqdi = -1
            for pqd in pq_datasets:
                pqdi += 1
                idx_date = (pqd.center_time +
                            timedelta(hours=params.product.time_zone)).date()
                if idx_date == params.time:
                    pq_data = stacker.data([pqd], mask=True)
                    pq_pixel_ds = pq_data.isel(**isel_kwargs)
                    # PQ flags
                    m = params.product.pq_product.measurements[
                        params.product.pq_band]
                    flags = pq_pixel_ds[params.product.pq_band].item()
                    # Skip datasets with flags outside the configured info mask.
                    if not flags & ~params.product.info_mask:
                        my_flags = my_flags | flags
                    else:
                        continue
                    feature_json["flags"] = {}
                    for mk, mv in m["flags_definition"].items():
                        if mk in params.product.ignore_flags_info:
                            continue
                        bits = mv["bits"]
                        values = mv["values"]
                        # Multi-bit flags are not reported.
                        if not isinstance(bits, int):
                            continue
                        flag = 1 << bits
                        if my_flags & flag:
                            val = values['1']
                        else:
                            val = values['0']
                        feature_json["flags"][mk] = val
            lads = list(available_dates)
            lads.sort()
            feature_json["data_available_for_dates"] = [
                d.strftime("%Y-%m-%d") for d in lads
            ]
            feature_json["data_links"] = sorted(get_s3_browser_uris(datasets))
        release_cube(dc)
    except Exception as e:
        # Release the cube on failure too, then re-raise.
        release_cube(dc)
        raise e
    # --- End code section requiring datacube.
    result = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": feature_json
        }]
    }
    return json.dumps(result), 200, resp_headers(
        {"Content-Type": "application/json"})
def feature_info(args):
    """Handle a WMS GetFeatureInfo request.

    Shrinks the request geobox to the queried pixel, groups the covering
    datasets by local date, extracts band/derived-band values and PQ flags
    for the requested date, and returns a GeoJSON FeatureCollection as
    (body, status, headers).
    """
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    # Parse GET parameters
    params = GetFeatureInfoParameters(args)
    feature_json = {}
    geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j)
    # shrink geobox to point
    # Prepare to extract feature info
    if geobox_is_point(params.geobox):
        geo_point_geobox = params.geobox
    else:
        geo_point_geobox = datacube.utils.geometry.GeoBox.from_geopolygon(
            geo_point, params.geobox.resolution, crs=params.geobox.crs)
    stacker = DataStacker(params.product, geo_point_geobox, params.time)
    # --- Begin code section requiring datacube.
    service_cfg = get_service_cfg()
    with cube() as dc:
        datasets = stacker.datasets(dc.index, all_time=True, point=geo_point)
        pq_datasets = stacker.datasets(dc.index, mask=True, all_time=False,
                                       point=geo_point)
        # Taking the data as a single point so our indexes into the data should be 0,0
        h_coord = service_cfg.published_CRSs[params.crsid]["horizontal_coord"]
        v_coord = service_cfg.published_CRSs[params.crsid]["vertical_coord"]
        s3_bucket = service_cfg.s3_bucket
        s3_url = service_cfg.s3_url
        isel_kwargs = {h_coord: 0, v_coord: 0}
        if datasets:
            # Map each dataset to its local calendar date; the timezone is
            # derived once from the queried point's lat/long.
            dataset_date_index = {}
            tz = None
            for ds in datasets:
                if tz is None:
                    crs_geo = geometry.CRS("EPSG:4326")
                    ptg = geo_point.to_crs(crs_geo)
                    tz = tz_for_coord(ptg.coords[0][0], ptg.coords[0][1])
                ld = local_date(ds, tz=tz)
                if ld in dataset_date_index:
                    dataset_date_index[ld].append(ds)
                else:
                    dataset_date_index[ld] = [ds]
            # Group datasets by time, load only datasets that match the idx_date
            available_dates = dataset_date_index.keys()
            ds_at_time = dataset_date_index.get(params.time, [])
            _LOG.info("%d datasets, %d at target date", len(datasets),
                      len(ds_at_time))
            if len(ds_at_time) > 0:
                pixel_ds = None
                data = stacker.data(
                    ds_at_time,
                    skip_corrections=True,
                    manual_merge=params.product.data_manual_merge,
                    fuse_func=params.product.fuse_func)
                # Non-geographic coordinate systems need to be projected onto a geographic
                # coordinate system. Why not use EPSG:4326?
                # Extract coordinates in CRS
                data_x = getattr(data, h_coord)
                data_y = getattr(data, v_coord)
                x = data_x[isel_kwargs[h_coord]].item()
                y = data_y[isel_kwargs[v_coord]].item()
                pt = geometry.point(x, y, params.crs)
                if params.product.multi_product:
                    # Report which concrete product/platform supplied the data.
                    feature_json["source_product"] = "%s (%s)" % (
                        ds_at_time[0].type.name,
                        ds_at_time[0].metadata_doc["platform"]["code"])
                # Project to EPSG:4326
                crs_geo = geometry.CRS("EPSG:4326")
                ptg = pt.to_crs(crs_geo)
                # Capture lat/long coordinates
                feature_json["lon"], feature_json["lat"] = ptg.coords[0]
                # Extract data pixel
                pixel_ds = data.isel(**isel_kwargs)
                # Get accurate timestamp from dataset
                feature_json["time"] = dataset_center_time(
                    ds_at_time[0]).strftime("%Y-%m-%d %H:%M:%S UTC")
                # Collect raw band values for pixel and derived bands from styles
                feature_json["bands"] = _make_band_dict(
                    params.product, pixel_ds, stacker.needed_bands())
                derived_band_dict = _make_derived_band_dict(
                    pixel_ds, params.product.style_index)
                if derived_band_dict:
                    feature_json["band_derived"] = derived_band_dict
                # Product-specific hook for extra feature-info fields.
                if callable(params.product.feature_info_include_custom):
                    additional_data = params.product.feature_info_include_custom(
                        feature_json["bands"])
                    feature_json.update(additional_data)
            my_flags = 0
            for pqd in pq_datasets:
                idx_date = dataset_center_time(pqd)
                if idx_date == params.time:
                    pq_data = stacker.data([pqd], mask=True)
                    pq_pixel_ds = pq_data.isel(**isel_kwargs)
                    # PQ flags
                    m = params.product.pq_product.measurements[
                        params.product.pq_band]
                    flags = pq_pixel_ds[params.product.pq_band].item()
                    # Skip datasets with flags outside the configured info mask.
                    if not flags & ~params.product.info_mask:
                        my_flags = my_flags | flags
                    else:
                        continue
                    feature_json["flags"] = {}
                    for mk, mv in m["flags_definition"].items():
                        if mk in params.product.ignore_flags_info:
                            continue
                        bits = mv["bits"]
                        values = mv["values"]
                        # Multi-bit flags are not reported.
                        if not isinstance(bits, int):
                            continue
                        flag = 1 << bits
                        if my_flags & flag:
                            val = values['1']
                        else:
                            val = values['0']
                        feature_json["flags"][mk] = val
            feature_json["data_available_for_dates"] = [
                d.strftime("%Y-%m-%d") for d in sorted(available_dates)
            ]
            feature_json["data_links"] = sorted(
                get_s3_browser_uris(ds_at_time, s3_url, s3_bucket))
            if params.product.feature_info_include_utc_dates:
                feature_json["data_available_for_utc_dates"] = sorted(
                    d.center_time.strftime("%Y-%m-%d") for d in datasets)
    # --- End code section requiring datacube.
    result = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": feature_json
        }]
    }
    return json.dumps(result), 200, resp_headers(
        {"Content-Type": "application/json"})
def p(lon):
    """Return a WGS-84 (epsg:4326) point at the given longitude on the equator."""
    return point(lon, 0, 'epsg:4326')
def feature_info(args):
    """Handle a WMS GetFeatureInfo request.

    Parses and validates the WMS query parameters, extracts per-band pixel
    values and PQ flag descriptions at the requested image coordinate, and
    returns a GeoJSON FeatureCollection as (body, status, headers).

    :param args: dict-like mapping of WMS request parameters.
    :raises WMSException: on invalid/missing parameters.
    """
    # Version parameter
    version = get_arg(args, "version", "WMS version",
                      permitted_values=["1.1.1", "1.3.0"])
    # Layer/product
    product = get_product_from_arg(args, "query_layers")
    # Validate the requested output format (only JSON is supported).
    fmt = get_arg(args, "info_format", "info format", lower=True,
                  errcode=WMSException.INVALID_FORMAT,
                  permitted_values=["application/json"])
    # CRS parameter (the parameter name differs between WMS versions)
    if version == "1.1.1":
        crs_arg = "srs"
    else:
        crs_arg = "crs"
    crsid = get_arg(args, crs_arg, "Coordinate Reference System",
                    errcode=WMSException.INVALID_FORMAT,
                    permitted_values=service_cfg["published_CRSs"].keys())
    crs = geometry.CRS(crsid)
    # BBox, height and width parameters
    geobox = _get_geobox(args, crs)
    # Time parameter
    time = get_time(args, product)
    # Point coords (parameter names differ between WMS versions)
    if version == "1.1.1":
        coords = ["x", "y"]
    else:
        coords = ["i", "j"]
    i = args.get(coords[0])
    j = args.get(coords[1])
    if i is None:
        raise WMSException("HorizontalCoordinate not supplied",
                           WMSException.INVALID_POINT,
                           "%s parameter" % coords[0])
    if j is None:
        # BUGFIX: previously named the horizontal parameter (coords[0]) here.
        raise WMSException("Vertical coordinate not supplied",
                           WMSException.INVALID_POINT,
                           "%s parameter" % coords[1])
    i = int(i)
    j = int(j)
    # Prepare to extract feature info
    tiler = RGBTileGenerator(product, geobox, time)
    feature_json = {}
    # --- Begin code section requiring datacube.
    dc = get_cube()
    # BUGFIX: release the cube even if an exception is raised below.
    try:
        geo_point = img_coords_to_geopoint(geobox, i, j)
        datasets = tiler.datasets(dc.index, all_time=True, point=geo_point)
        pq_datasets = tiler.datasets(dc.index, mask=True, all_time=True,
                                     point=geo_point)
        if service_cfg["published_CRSs"][crsid]["geographic"]:
            h_coord = "longitude"
            v_coord = "latitude"
        else:
            h_coord = service_cfg["published_CRSs"][crsid]["horizontal_coord"]
            v_coord = service_cfg["published_CRSs"][crsid]["vertical_coord"]
        isel_kwargs = {h_coord: [i], v_coord: [j]}
        if datasets:
            available_dates = set()
            for d in datasets:
                # Dataset date in the product's configured local timezone.
                idx_date = (d.center_time +
                            timedelta(hours=product.time_zone)).date()
                available_dates.add(idx_date)
                # Only extract pixel data once, from the first dataset
                # matching the requested date ("lon" marks it as done).
                if idx_date == time and "lon" not in feature_json:
                    data = tiler.data([d])
                    # Use i,j image coordinates to extract data pixel from dataset, and
                    # convert to lat/long geographic coordinates
                    if service_cfg["published_CRSs"][crsid]["geographic"]:
                        # Geographic coordinate systems (e.g. EPSG:4326/WGS-84) are already in lat/long
                        feature_json["lat"] = data.latitude[j].item()
                        feature_json["lon"] = data.longitude[i].item()
                        pixel_ds = data.isel(**isel_kwargs)
                    else:
                        # Non-geographic coordinate systems need to be projected onto a geographic
                        # coordinate system. Why not use EPSG:4326?
                        # Extract coordinates in CRS
                        data_x = getattr(data, h_coord)
                        data_y = getattr(data, v_coord)
                        x = data_x[i].item()
                        y = data_y[j].item()
                        pt = geometry.point(x, y, crs)
                        # Project to EPSG:4326
                        crs_geo = geometry.CRS("EPSG:4326")
                        ptg = pt.to_crs(crs_geo)
                        # Capture lat/long coordinates
                        feature_json["lon"], feature_json["lat"] = ptg.coords[0]
                        # Extract data pixel
                        pixel_ds = data.isel(**isel_kwargs)
                    # Get accurate timestamp from dataset
                    feature_json["time"] = d.center_time.strftime(
                        "%Y-%m-%d %H:%M:%S UTC")
                    # Collect raw band values for pixel
                    feature_json["bands"] = {}
                    for band in tiler.needed_bands():
                        band_val = pixel_ds[band].item()
                        # NOTE(review): -999 appears to be a hard-coded nodata
                        # sentinel — confirm against the product definitions.
                        if band_val == -999:
                            feature_json["bands"][band] = "n/a"
                        else:
                            feature_json["bands"][band] = pixel_ds[band].item()
            my_flags = 0
            for pqd in pq_datasets:
                idx_date = (pqd.center_time +
                            timedelta(hours=product.time_zone)).date()
                if idx_date == time:
                    pq_data = tiler.data([pqd], mask=True)
                    pq_pixel_ds = pq_data.isel(**isel_kwargs)
                    # PQ flags
                    m = product.pq_product.measurements[product.pq_band]
                    flags = pq_pixel_ds[product.pq_band].item()
                    # BUGFIX: accumulate flags across PQ datasets. The original
                    # `my_flags = flags | flags` simply overwrote with `flags`.
                    my_flags = my_flags | flags
                    feature_json["flags"] = {}
                    for mk, mv in m["flags_definition"].items():
                        bits = mv["bits"]
                        values = mv["values"]
                        # Multi-bit flags are not reported.
                        if not isinstance(bits, int):
                            continue
                        flag = 1 << bits
                        if my_flags & flag:
                            val = values['1']
                        else:
                            val = values['0']
                        feature_json["flags"][mk] = val
            lads = sorted(available_dates)
            feature_json["data_available_for_dates"] = [
                d.strftime("%Y-%m-%d") for d in lads
            ]
    finally:
        release_cube(dc)
    # --- End code section requiring datacube.
    result = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": feature_json
        }]
    }
    return json.dumps(result), 200, resp_headers(
        {"Content-Type": "application/json"})
def test_ops():
    """Geometry operations: set ops (union/intersection/difference),
    segmentation, interpolation, array interface, simplify, iteration,
    transform, and sides."""
    box1 = geometry.box(10, 10, 30, 30, crs=epsg4326)
    box2 = geometry.box(20, 10, 40, 30, crs=epsg4326)
    box3 = geometry.box(20, 10, 40, 30, crs=epsg4326)
    box4 = geometry.box(40, 10, 60, 30, crs=epsg4326)
    no_box = None
    assert box1 != box2
    assert box2 == box3
    assert box3 != no_box
    # Overlapping union: 400 + 400 - 200 overlap.
    union1 = box1.union(box2)
    assert union1.area == 600.0
    # Set ops across different CRSs are rejected.
    with pytest.raises(geometry.CRSMismatchError):
        box1.union(box2.to_crs(epsg3857))
    inter1 = box1.intersection(box2)
    assert bool(inter1)
    assert inter1.area == 200.0
    # Disjoint boxes intersect to an empty geometry.
    inter2 = box1.intersection(box4)
    assert not bool(inter2)
    assert inter2.is_empty
    # assert not inter2.is_valid TODO: what's going on here?
    diff1 = box1.difference(box2)
    assert diff1.area == 200.0
    symdiff1 = box1.symmetric_difference(box2)
    assert symdiff1.area == 400.0
    # test segmented: adds vertices without changing length/area/CRS.
    line = geometry.line([(0, 0), (0, 5), (10, 5)], epsg4326)
    line2 = line.segmented(2)
    assert line.crs is line2.crs
    assert line.length == line2.length
    assert len(line.coords) < len(line2.coords)
    poly = geometry.polygon([(0, 0), (0, 5), (10, 5)], epsg4326)
    poly2 = poly.segmented(2)
    assert poly.crs is poly2.crs
    assert poly.length == poly2.length
    assert poly.area == poly2.area
    assert len(poly.geom.exterior.coords) < len(poly2.geom.exterior.coords)
    poly2 = poly.exterior.segmented(2)
    assert poly.crs is poly2.crs
    assert poly.length == poly2.length
    assert len(poly.geom.exterior.coords) < len(poly2.geom.coords)
    # test interpolate: point at distance 1 along the line.
    pt = line.interpolate(1)
    assert pt.crs is line.crs
    assert pt.coords[0] == (0, 1)
    assert isinstance(pt.coords, list)
    # Interpolating along a point is not meaningful.
    with pytest.raises(TypeError):
        pt.interpolate(3)
    # test array interface
    assert line.__array_interface__ is not None
    assert np.array(line).shape == (3, 2)
    # test simplify: already-minimal polygon is unchanged.
    poly = geometry.polygon([(0, 0), (0, 5), (10, 5)], epsg4326)
    assert poly.simplify(100) == poly
    # test iteration: a 2-part MultiPolygon yields its parts, CRS preserved.
    poly_2_parts = geometry.Geometry(
        {
            "type":
            "MultiPolygon",
            "coordinates": [[[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0],
                              [102.0, 3.0], [102.0, 2.0]]],
                            [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0],
                              [100.0, 1.0], [100.0, 0.0]],
                             [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8],
                              [100.2, 0.8], [100.2, 0.2]]]]
        }, 'EPSG:4326')
    pp = list(poly_2_parts)
    assert len(pp) == 2
    assert all(p.crs == poly_2_parts.crs for p in pp)
    # test transform
    assert geometry.point(
        0, 0, epsg4326).transform(lambda x, y: (x + 1, y + 2)) == geometry.point(
            1, 2, epsg4326)
    # test sides: four edges of a box, in order, CRS preserved.
    box = geometry.box(1, 2, 11, 22, epsg4326)
    ll = list(geometry.sides(box))
    assert all(l.crs is epsg4326 for l in ll)
    assert len(ll) == 4
    assert ll[0] == geometry.line([(1, 2), (1, 22)], epsg4326)
    assert ll[1] == geometry.line([(1, 22), (11, 22)], epsg4326)
    assert ll[2] == geometry.line([(11, 22), (11, 2)], epsg4326)
    assert ll[3] == geometry.line([(11, 2), (1, 2)], epsg4326)
def feature_info(args):
    # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
    """Handle a WMS GetFeatureInfo request.

    For the single image coordinate (i, j) in ``args``, extracts per-date
    band values, derived-band values and PQ flag descriptions, plus the
    dates for which data is available at that point, and returns them as a
    GeoJSON FeatureCollection response.

    :param args: dict of GetFeatureInfo request parameters.
    :return: JSON response (via ``json_response``) wrapping one Feature
        whose ``properties`` carry the extracted info.
    :raises WMSException: if no database connection can be established.
    """
    # Parse GET parameters
    params = GetFeatureInfoParameters(args)
    feature_json = {}

    geo_point = img_coords_to_geopoint(params.geobox, params.i, params.j)
    # Shrink the geobox to the queried point so only one pixel is loaded.
    if geobox_is_point(params.geobox):
        geo_point_geobox = params.geobox
    else:
        geo_point_geobox = datacube.utils.geometry.GeoBox.from_geopolygon(
            geo_point, params.geobox.resolution, crs=params.geobox.crs)
    tz = tz_for_geometry(geo_point_geobox.geographic_extent)
    stacker = DataStacker(params.product, geo_point_geobox, params.times)
    # --- Begin code section requiring datacube.
    cfg = get_config()
    with cube() as dc:
        if not dc:
            raise WMSException("Database connectivity failure")
        datasets = stacker.datasets(dc.index, all_time=True, point=geo_point)

        # Taking the data as a single point so our indexes into the data should be 0,0
        h_coord = cfg.published_CRSs[params.crsid]["horizontal_coord"]
        v_coord = cfg.published_CRSs[params.crsid]["vertical_coord"]
        s3_bucket = cfg.s3_bucket
        s3_url = cfg.s3_url
        isel_kwargs = {h_coord: 0, v_coord: 0}
        if any(datasets):
            # Group datasets by time, load only datasets that match the idx_date
            global_info_written = False
            feature_json["data"] = []
            fi_date_index = {}
            ds_at_times = collapse_datasets_to_times(datasets, params.times, tz)
            # ds_at_times["time"].attrs["units"] = 'seconds since 1970-01-01 00:00:00'
            if ds_at_times:
                data = stacker.data(
                    ds_at_times,
                    skip_corrections=True,
                    manual_merge=params.product.data_manual_merge,
                    fuse_func=params.product.fuse_func)
                for dt in data.time.values:
                    td = data.sel(time=dt)
                    # Global data that should apply to all dates, but needs some data to extract
                    if not global_info_written:
                        global_info_written = True
                        # Non-geographic coordinate systems need to be projected onto a
                        # geographic coordinate system for the lon/lat properties.
                        # Extract coordinates in CRS
                        data_x = getattr(td, h_coord)
                        data_y = getattr(td, v_coord)
                        x = data_x[isel_kwargs[h_coord]].item()
                        y = data_y[isel_kwargs[v_coord]].item()
                        pt = geometry.point(x, y, params.crs)
                        # Project to EPSG:4326
                        crs_geo = geometry.CRS("EPSG:4326")
                        ptg = pt.to_crs(crs_geo)
                        # Capture lat/long coordinates
                        feature_json["lon"], feature_json["lat"] = ptg.coords[0]
                    date_info = {}
                    ds = ds_at_times.sel(time=dt).values.tolist()[0]
                    if params.product.multi_product:
                        date_info["source_product"] = "%s (%s)" % (
                            ds.type.name, ds.metadata_doc["platform"]["code"])
                    # Extract data pixel
                    pixel_ds = td.isel(**isel_kwargs)
                    # Get accurate timestamp from dataset
                    if params.product.is_raw_time_res:
                        date_info["time"] = dataset_center_time(ds).strftime(
                            "%Y-%m-%d %H:%M:%S UTC")
                    else:
                        date_info["time"] = ds.time.begin.strftime("%Y-%m-%d")
                    # Collect raw band values for pixel and derived bands from styles
                    date_info["bands"] = _make_band_dict(
                        params.product, pixel_ds, stacker.needed_bands())
                    derived_band_dict = _make_derived_band_dict(
                        pixel_ds, params.product.style_index)
                    if derived_band_dict:
                        date_info["band_derived"] = derived_band_dict
                    # Add any custom-defined fields.
                    for k, f in params.product.feature_info_custom_includes.items():
                        date_info[k] = f(date_info["bands"])
                    feature_json["data"].append(date_info)
                    fi_date_index[dt] = feature_json["data"][-1]
                # --- Pixel-quality (PQ) flag extraction ---
                my_flags = 0
                if params.product.pq_names == params.product.product_names:
                    pq_datasets = ds_at_times
                else:
                    pq_datasets = stacker.datasets(dc.index,
                                                   mask=True,
                                                   all_time=False,
                                                   point=geo_point)
                if pq_datasets:
                    if not params.product.pq_ignore_time:
                        pq_datasets = collapse_datasets_to_times(
                            pq_datasets, params.times, tz)
                    pq_data = stacker.data(pq_datasets, mask=True)
                    # feature_json["flags"] = []
                    for dt in pq_data.time.values:
                        pqd = pq_data.sel(time=dt)
                        # Attach flags to the matching data date entry, or
                        # create a flags-only entry if no data date matched.
                        date_info = fi_date_index.get(dt)
                        if date_info:
                            if "flags" not in date_info:
                                date_info["flags"] = {}
                        else:
                            date_info = {"flags": {}}
                            feature_json["data"].append(date_info)
                        pq_pixel_ds = pqd.isel(**isel_kwargs)
                        # PQ flags
                        flags = pq_pixel_ds[params.product.pq_band].item()
                        # Skip this date entirely if any flag outside the
                        # configured info mask is set.
                        if not flags & ~params.product.info_mask:
                            my_flags = my_flags | flags
                        else:
                            continue
                        for mk, mv in params.product.flags_def.items():
                            if mk in params.product.ignore_info_flags:
                                continue
                            bits = mv["bits"]
                            values = mv["values"]
                            if isinstance(bits, int):
                                # Single-bit flag: report the '0'/'1' label.
                                flag = 1 << bits
                                if my_flags & flag:
                                    val = values['1']
                                else:
                                    val = values['0']
                                date_info["flags"][mk] = val
                            else:
                                try:
                                    for i in bits:
                                        if not isinstance(i, int):
                                            raise TypeError()
                                    # bits is a list of ints - match the whole
                                    # flag word against the value table.
                                    for key, desc in values.items():
                                        if ((isinstance(key, str) and key == str(my_flags))
                                                or (isinstance(key, int) and key == my_flags)):
                                            date_info["flags"][mk] = desc
                                            break
                                except TypeError:
                                    pass

            # Dates (in the layer's time resolution) for which the queried
            # point actually falls inside a dataset extent.
            feature_json["data_available_for_dates"] = []
            for d in datasets.coords["time"].values:
                dt_datasets = datasets.sel(time=d)
                dt = datetime.utcfromtimestamp(d.astype(int) * 1e-9)
                if params.product.is_raw_time_res:
                    dt = solar_date(dt, tz)
                pt_native = None
                for ds in dt_datasets.values.item():
                    # Only reproject when the dataset CRS changes.
                    if pt_native is None:
                        pt_native = geo_point.to_crs(ds.crs)
                    elif pt_native.crs != ds.crs:
                        pt_native = geo_point.to_crs(ds.crs)
                    if ds.extent and ds.extent.contains(pt_native):
                        feature_json["data_available_for_dates"].append(
                            dt.strftime("%Y-%m-%d"))
                        break
            if ds_at_times:
                # NOTE(review): `pt` is only bound inside the data loop above;
                # assumes ds_at_times implies at least one loaded time slice.
                feature_json["data_links"] = sorted(
                    get_s3_browser_uris(ds_at_times, pt, s3_url, s3_bucket))
            else:
                feature_json["data_links"] = []
            if params.product.feature_info_include_utc_dates:
                unsorted_dates = []
                for tds in datasets:
                    for ds in tds.values.item():
                        # Consistency fix: use params.product.is_raw_time_res
                        # like the two other checks in this function (was
                        # params.product.time_resolution.is_raw_time_res).
                        if params.product.is_raw_time_res:
                            unsorted_dates.append(
                                ds.center_time.strftime("%Y-%m-%d"))
                        else:
                            unsorted_dates.append(
                                ds.time.begin.strftime("%Y-%m-%d"))
                # BUG FIX: previously this sorted
                # `d.center_time.strftime(...) for d in datasets` - iterating
                # the time-grouped container (whose elements have no
                # center_time) and discarding the unsorted_dates list that was
                # just built. Sort the collected dates instead.
                feature_json["data_available_for_utc_dates"] = sorted(unsorted_dates)
    # --- End code section requiring datacube.
    result = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "properties": feature_json
        }]
    }
    return json_response(result, cfg)
def reproject_point(pos): pos = point(pos['lon'], pos['lat'], CRS('EPSG:4326')) coords = pos.to_crs(crs).coords[0] return {'x': coords[0], 'y': coords[1]}
def to_crs(self, new_crs):
    """Transform this subset's bounding extents in place into ``new_crs``.

    Un-subsetted axes are filled from the layer's advertised full-extent
    bboxes rather than reprojected; subsetted axes are reprojected by
    building a point/line/polygon geometry (depending on which axes are
    slices) and transforming it. Finally the extents are quantised to the
    target grid resolution.

    :param new_crs: name/key of the target CRS (as used in
        ``self.layer.grids`` and ``self.layer.ranges["bboxes"]``).
    """
    grid = self.layer.grids[new_crs]
    skip_x_xform = False
    skip_y_xform = False
    if self.crs != new_crs:
        if not self.subsetted.x and not self.subsetted.y:
            # Neither axis subsetted
            # Take the layer's full extent in the target CRS directly -
            # no reprojection arithmetic needed; mark conversion done.
            self.min.x = self.layer.ranges["bboxes"][new_crs]["left"]
            self.max.x = self.layer.ranges["bboxes"][new_crs]["right"]
            self.min.y = self.layer.ranges["bboxes"][new_crs]["bottom"]
            self.max.y = self.layer.ranges["bboxes"][new_crs]["top"]
            self.crs = new_crs
        elif not self.subsetted.x or not self.subsetted.y:
            # One axis subsetted
            # Pre-fill the un-subsetted axis with the full layer range in the
            # *current* CRS (so a valid geometry can be built below), and flag
            # it to be overwritten with the target-CRS full range afterwards.
            if self.subsetted.x:
                self.min.y = self.layer.ranges["bboxes"][self.crs]["bottom"]
                self.max.y = self.layer.ranges["bboxes"][self.crs]["top"]
                skip_y_xform = True
            if self.subsetted.y:
                self.min.x = self.layer.ranges["bboxes"][self.crs]["left"]
                self.max.x = self.layer.ranges["bboxes"][self.crs]["right"]
                skip_x_xform = True
        else:
            # Both axes subsetted
            pass

    if self.crs != new_crs:
        # At least one axis was subsetted, so a real reprojection is needed.
        is_point = False
        # Prepare geometry for transformation
        old_crs_obj = self.cfg.crs(self.crs)
        if self.is_slice("x") and self.is_slice("y"):
            # Both axes are slices: a single point.
            geom = geometry.point(self.min.x, self.min.y, old_crs_obj)
            is_point = True
        elif self.is_slice("x") or self.is_slice("y"):
            # One slice axis: a degenerate extent - represent as a line.
            geom = geometry.line(
                (
                    (self.min.x, self.min.y),
                    (self.max.x, self.max.y)
                ), old_crs_obj)
        else:
            # Full 2D extent: closed rectangle polygon (anticlockwise).
            geom = geometry.polygon(
                (
                    (self.min.x, self.min.y),
                    (self.min.x, self.max.y),
                    (self.max.x, self.max.y),
                    (self.max.x, self.min.y),
                    (self.min.x, self.min.y),
                ),
                old_crs_obj
            )
        new_crs_obj = self.cfg.crs(new_crs)
        grid = self.layer.grids[new_crs]
        if is_point:
            # Project the point, then expand it to exactly one grid cell.
            prj_pt = geom.to_crs(new_crs_obj)
            x, y = prj_pt.coords[0]
            self.min.set(x, y)
            self.max.set(x + grid["resolution"][0], y + grid["resolution"][1])
            self.size.set(1, 1)
        else:
            proj_geom = geom.to_crs(new_crs_obj)
            bbox = proj_geom.boundingbox
            # For each axis: use the projected bbox if it was subsetted,
            # otherwise substitute the layer's full range in the target CRS.
            if skip_x_xform:
                self.min.x = self.layer.ranges["bboxes"][new_crs]["left"]
                self.max.x = self.layer.ranges["bboxes"][new_crs]["right"]
            else:
                self.min.x = bbox.left
                self.max.x = bbox.right
            if skip_y_xform:
                self.min.y = self.layer.ranges["bboxes"][new_crs]["bottom"]
                self.max.y = self.layer.ranges["bboxes"][new_crs]["top"]
            else:
                self.min.y = bbox.bottom
                self.max.y = bbox.top
            self.quantise_to_resolution(grid)
        self.crs = new_crs
    else:
        # Already in the target CRS: just snap extents to the grid.
        self.quantise_to_resolution(grid)
def img_coords_to_geopoint(geobox, i, j): return geometry.point(geobox.coordinates["x"].values[int(i)], geobox.coordinates["y"].values[int(j)], geobox.crs)