def add_item(self, path_pro: Optional[str], path_tiff: Optional[str], path_img: Optional[str]) -> None:
    """Parse a ``.pro`` telemetry file, build a STAC item with its assets,
    and file it under the per-satellite collection (created on demand).

    :param path_pro: path to the mandatory ``.pro`` file (Windows-style
        backslash-separated, as produced by the upstream tooling).
    :param path_tiff: optional path to a GeoTIFF rendering of the data.
    :param path_img: optional path to an ``.img`` rendering of the data.
    :raises ValueError: if ``path_pro`` is missing or lacks the ``.pro``
        suffix (the original ``assert`` would vanish under ``python -O``
        and raised AttributeError on ``None``).
    """
    if path_pro is None or not path_pro.endswith(".pro"):
        raise ValueError("path_pro must be a path ending in '.pro'")
    # BUGFIX: str.rstrip(".pro") strips any trailing run of the characters
    # '.', 'p', 'r', 'o' (e.g. "drop.pro" -> "d"); slice the literal
    # suffix off instead.
    file_name = path_pro.split("\\")[-1][: -len(".pro")]
    print(file_name)
    b0, data = parse(path_pro)
    item: Item = stac.create_item(i_id=file_name, metadata=b0)
    assets: List[Asset] = [Asset(href=path_pro, media_type="pro")]
    if path_tiff is not None:
        assets.append(Asset(href=path_tiff, media_type="geotiff"))
    if path_img is not None:
        assets.append(Asset(href=path_img, media_type="img"))
    stac.add_assets(item, assets)
    # One collection per satellite, keyed by the numeric satId.
    sat_id = str(b0["b0_common"]["satId"][0])
    catalog = self.root_catalog.get_child(sat_id)
    if catalog is None:
        sat_name = b0["b0_common"]["satName"][0].decode("utf-8")
        extent = Extent(
            spatial=SpatialExtent([[-180, -90, 180, 90]]),  # TODO: real extent
            temporal=TemporalExtent([[
                datetime.strptime("2009-01-01T00:00:00.000000",
                                  "%Y-%m-%dT%H:%M:%S.%f"),
                None,
            ]]),
        )
        catalog = Collection(
            id=sat_id,
            title=sat_name,
            description=f"Catalog for satellite {sat_name}",
            extent=extent,
        )
        self.root_catalog.add_child(catalog, catalog.title)
    # update_collection_extent(item, catalog)
    catalog.add_item(item)
def test_temporal_extent_init_typing(self) -> None:
    """Typing-only exercise: TemporalExtent must accept a list of
    [start, end] datetime pairs for its intervals argument."""
    interval = [
        str_to_datetime("2022-01-01T00:00:00Z"),
        str_to_datetime("2022-01-31T23:59:59Z"),
    ]
    _ = TemporalExtent([interval])
def test_eo_items_are_heritable(self):
    """Items under a collection that carries common eo properties must
    still implement the eo extension after a save/load round trip."""
    def _make_item(suffix, key):
        # Two near-identical items that inherit eo properties from the
        # collection via the 'commons' extension.
        return Item(id='test-item-{}'.format(suffix),
                    geometry=RANDOM_GEOM,
                    bbox=RANDOM_BBOX,
                    datetime=datetime.utcnow(),
                    properties={'key': key},
                    stac_extensions=['eo', 'commons'])

    item1 = _make_item('1', 'one')
    item2 = _make_item('2', 'two')

    # WorldView-3 band table: (name, description, common_name).
    band_specs = [
        ('Coastal', 'Coastal: 400 - 450 nm', 'coastal'),
        ('Blue', 'Blue: 450 - 510 nm', 'blue'),
        ('Green', 'Green: 510 - 580 nm', 'green'),
        ('Yellow', 'Yellow: 585 - 625 nm', 'yellow'),
        ('Red', 'Red: 630 - 690 nm', 'red'),
        ('Red Edge', 'Red Edge: 705 - 745 nm', 'rededge'),
        ('Near-IR1', 'Near-IR1: 770 - 895 nm', 'nir08'),
        ('Near-IR2', 'Near-IR2: 860 - 1040 nm', 'nir09'),
    ]
    wv3_bands = [Band.create(name=n, description=d, common_name=c)
                 for n, d, c in band_specs]

    collection_extent = Extent(
        spatial=SpatialExtent(bboxes=[RANDOM_BBOX]),
        temporal=TemporalExtent(intervals=[[item1.datetime, None]]))

    common_properties = {
        'eo:bands': [b.to_dict() for b in wv3_bands],
        'gsd': 0.3,
        'eo:platform': 'Maxar',
        'eo:instrument': 'WorldView3'
    }

    collection = Collection(id='test',
                            description='test',
                            extent=collection_extent,
                            properties=common_properties,
                            stac_extensions=['commons'],
                            license='CC-BY-SA-4.0')
    collection.add_items([item1, item2])

    with TemporaryDirectory() as tmp_dir:
        collection.normalize_hrefs(tmp_dir)
        collection.save(catalog_type=CatalogType.SELF_CONTAINED)
        read_col = Collection.from_file('{}/collection.json'.format(tmp_dir))
        items = list(read_col.get_all_items())
        self.assertEqual(len(items), 2)
        for read_item in items:
            self.assertTrue(read_item.ext.implements('eo'))
def test_supplying_href_in_init_does_not_fail(self) -> None:
    """Passing href to the Collection constructor must set the self href
    rather than raising."""
    test_href = "http://example.com/collection.json"
    extent = Extent(
        spatial=SpatialExtent(bboxes=[ARBITRARY_BBOX]),
        temporal=TemporalExtent(intervals=[[TEST_DATETIME, None]]),
    )
    collection = Collection(
        id="test", description="test desc", extent=extent, href=test_href
    )
    self.assertEqual(collection.get_self_href(), test_href)
def collection_update_extents(collection):
    """Recompute *collection*'s spatial and temporal extents from its items.

    The spatial extent becomes the bounds of all item geometries combined.
    The temporal extent spans the items' datetimes: open-ended when only a
    single distinct datetime exists, [min, max] otherwise, and a 1900-based
    open-ended placeholder (with a warning) when no item has a datetime.

    :param collection: a pystac Collection whose ``extent`` is mutated
        in place.
    """
    # Materialize once: get_all_items() re-walks the catalog on every call,
    # and the original iterated it twice.
    items = list(collection.get_all_items())

    bounds = GeometryCollection([shape(i.geometry) for i in items]).bounds
    collection.extent.spatial = SpatialExtent(bounds)

    # Distinct datetimes only; items without a real datetime are ignored.
    dates = {i.datetime for i in items if isinstance(i.datetime, datetime)}

    if len(dates) == 1:
        collection.extent.temporal = TemporalExtent([(next(iter(dates)), None)])
    elif len(dates) > 1:
        collection.extent.temporal = TemporalExtent([(min(dates), max(dates))])
    else:
        print("WARN: {} has no TemporalExtent. Dates: {}".format(
            collection.id, dates))
        collection.extent.temporal = TemporalExtent([
            (datetime(1900, 1, 1, 0, 0, 0), None)
        ])
def test_supplying_href_in_init_does_not_fail(self):
    """An href given at construction time must become the self href."""
    test_href = "http://example.com/collection.json"
    extent = Extent(
        spatial=SpatialExtent(bboxes=[RANDOM_BBOX]),
        temporal=TemporalExtent(intervals=[[TEST_DATETIME, None]]))
    collection = Collection(id='test',
                            description='test desc',
                            extent=extent,
                            properties={},
                            href=test_href)
    self.assertEqual(collection.get_self_href(), test_href)
def test_spatial_allows_single_bbox(self) -> None:
    """SpatialExtent should accept one bare bbox, not only a list of them."""
    extent = Extent(
        # Deliberately a single bbox rather than [bbox].
        spatial=SpatialExtent(bboxes=ARBITRARY_BBOX),
        temporal=TemporalExtent(intervals=[[TEST_DATETIME, None]]),
    )
    collection = Collection(id="test", description="test desc", extent=extent)
    # Validation requires a self href to be set.
    collection.set_self_href("https://example.com/collection.json")
    collection.validate()
def test_spatial_allows_single_bbox(self):
    """SpatialExtent should accept one bare bbox, not only a list of them."""
    extent = Extent(
        # Deliberately a single bbox rather than [bbox].
        spatial=SpatialExtent(bboxes=RANDOM_BBOX),
        temporal=TemporalExtent(intervals=[[TEST_DATETIME, None]]))
    collection = Collection(id='test',
                            description='test desc',
                            extent=extent)
    # Validation requires a self href to be set.
    collection.set_self_href('https://example.com/collection.json')
    collection.validate()
# Fixed polygon used wherever tests need *some* valid GeoJSON geometry.
RANDOM_GEOM = {
    "type": "Polygon",
    "coordinates": [[[-2.5048828125, 3.8916575492899987],
                     [-1.9610595703125, 3.8916575492899987],
                     [-1.9610595703125, 4.275202171119132],
                     [-2.5048828125, 4.275202171119132],
                     [-2.5048828125, 3.8916575492899987]]]
}

# Bbox assembled from the polygon's first two vertices.
# NOTE(review): vertices [0][0] and [0][1] share the same latitude, so this
# is a zero-height bbox (ymin == ymax) — fine for tests that only need *a*
# bbox, but confirm that is intentional.
RANDOM_BBOX = [
    RANDOM_GEOM['coordinates'][0][0][0],
    RANDOM_GEOM['coordinates'][0][0][1],
    RANDOM_GEOM['coordinates'][0][1][0],
    RANDOM_GEOM['coordinates'][0][1][1]
]

# Extent derived from the polygon above; temporal extent is "from now".
RANDOM_EXTENT = Extent(spatial=SpatialExtent.from_coordinates(RANDOM_GEOM['coordinates']),
                       temporal=TemporalExtent.from_now())  # noqa: E126


class TestCases:
    """Helpers for locating test data files relative to this module."""

    @staticmethod
    def get_path(rel_path):
        # Resolve rel_path against the directory one level above this file.
        return os.path.abspath(os.path.join(os.path.dirname(__file__), '..', rel_path))

    @staticmethod
    def get_examples_info():
        # Reads the example-info CSV and resolves each row's first column to
        # an absolute path.
        # NOTE(review): truncated in this view — the loop body and the
        # return of `examples` continue beyond the visible chunk.
        examples = []

        info_path = TestCases.get_path('data-files/examples/example-info.csv')
        with open(TestCases.get_path('data-files/examples/example-info.csv')) as f:
            for row in csv.reader(f):
                path = os.path.abspath(os.path.join(os.path.dirname(info_path), row[0]))
"Natural Resources Canada Centre for Topographic Information", ["licensor", "processor"], "www.geobase.ca", ), Provider("Sparkgeo", "*****@*****.**", ["processor", "host"], "www.sparkgeo.com"), Provider( "PCI Geomatics", "*****@*****.**", ["processor", "host"], "www.pcigeomatics.com" ), ] SpotExtents = Extent( SpatialExtent([[0.0, 0.0, 0.0, 0.0]]), TemporalExtent( [ [ datetime.strptime("2005-01-01", "%Y-%m-%d"), datetime.strptime("2010-01-01", "%Y-%m-%d"), ] ] ), ) OrthoCollection = Collection( id="canada_spot_orthoimages", description="Orthoimages of Canada 2005-2010", extent=SpotExtents, title=None, stac_extensions=None, license="Proprietery", keywords="SPOT, Geobase, orthoimages", version="0.0.1", providers=SpotProviders,
# these objects are ultimately assets that we'd like to group by tile ID, we do that here imagery_objects = [ obj for obj in objects if obj.content_type == "image/tiff" ] imagery_grouped = groupby(imagery_objects, key=lambda obj: obj.key.split("/")[-2]) sentinelhub_request = json.loads([ obj for obj in objects if obj.key.endswith("json") ][0].get()["Body"].read().decode("utf-8")) time_range = sentinelhub_request["processRequest"]["input"]["data"][0][ "dataFilter"]["timeRange"] start_time = datetime.fromisoformat(time_range["from"][:-1]) end_time = datetime.fromisoformat(time_range["to"][:-1]) temporal_extent = TemporalExtent(intervals=[[start_time, end_time]]) stac_items = [] for group_id, image_group in imagery_grouped: # assemble assets so that they might be grouped (without duplication) in items assets = [] for image in image_group: s3_path = "s3://" + image.bucket_name + "/" + image.key # The extents should be the same, so whichever one is checked last should be fine with rio.open(s3_path) as img: bounds = img.bounds assets.append(Asset(s3_path)) if aggregate_bounds is None:
with open("{}/{}/{}-usfimr.wkt".format(root_path, fid, fid), "w") as wkt_file: wkt_file.write(shapely_geom.wkt) with open("{}/{}/{}-usfimr.wkb".format(root_path, fid, fid), "wb") as wkb_file: wkb_file.write(shapely_geom.wkb) with open("{}/{}/{}-usfimr.geojson".format(root_path, fid, fid), "w") as geojson_file: geojson_file.write(json.dumps(geom)) overall_extent = Extent( SpatialExtent(running_spatial_extent), TemporalExtent([[running_start_dt, running_end_dt]]), ) root_collection = Collection( id="USFIMR", description= "GloFIMR is an extension of the USFIMR project that commenced in August 2016 with funding from NOAA. The project’s main goal is to provide high-resolution inundation extent maps of flood events to be used by scientists and practitioners for model calibration and flood susceptibility evaluation. The maps are based on analysis of Remote Sensing imagery from a number of Satellite sensors (e.g. Landsat, Sentinel-1, Sentinel-2). The maps are accessible via the online map repository below. The repository is under development and new maps are added upon request.", title="U.S. Flood Inundation Mapping Repository", extent=overall_extent, ) for item in items: root_collection.add_item(item) # Save Complete Catalog root_collection.normalize_and_save(root_path,
huc_item.add_asset(key="catchhuc", asset=catchhuc_asset) hydrogeo_asset = Asset( href="{}/{}/hydrogeo-fulltable-{}.csv".format(args.root_uri, fid, fid), description="Hydraulic property table with the following fields: CatchId, Stage, Number of Cells, SurfaceArea (m2), BedArea (m2), Volume (m3), SLOPE, LENGTHKM, AREASQKM, Roughness, TopWidth (m), WettedPerimeter (m), WetArea (m2), HydraulicRadius (m), Discharge (m3s-1)", media_type="text/csv", ) huc_item.add_asset(key="hydrogeo", asset=hydrogeo_asset) items.append(huc_item) overall_extent = Extent( SpatialExtent( [running_extent[0], running_extent[1], running_extent[2], running_extent[3]] ), TemporalExtent([[version_dt, None]]), ) # Root Collection root_collection = Collection( id="hand_021", description="The continental flood inundation mapping (CFIM) framework is a high-performance computing (HPC)-based computational framework for the Height Above Nearest Drainage (HAND)-based inundation mapping methodology. Using the 10m Digital Elevation Model (DEM) data produced by U.S. Geological Survey (USGS) 3DEP (the 3-D Elevation Program) and the NHDPlus hydrography dataset produced by USGS and the U.S. Environmental Protection Agency (EPA), a hydrological terrain raster called HAND is computed for HUC6 units in the conterminous U.S. (CONUS). The value of each raster cell in HAND is an approximation of the relative elevation between the cell and its nearest water stream. Derived from HAND, a hydraulic property table is established to calculate river geometry properties for each of the 2.7 million river reaches covered by NHDPlus (5.5 million kilometers in total length). This table is a lookup table for water depth given an input stream flow value. Such lookup is available between water depth 0m and 25m at 1-foot interval. 
The flood inundation map can then be computed by using HAND and this lookup table based on the near real-time water forecast from the National Water Model (NWM) at the National Oceanic and Atmospheric Administration (NOAA).", title="HAND and the Hydraulic Property Table version 0.2.1", extent=overall_extent, license="CC-BY-4.0", ) for item in items: root_collection.add_item(item) # Save Complete Catalog root_path = "./data/catalog"
# NOTE(review): fragment opens mid-literal — the ARBITRARY_GEOM dict it
# belongs to starts before the visible chunk.
            [-1.9610595703125, 4.275202171119132],
            [-2.5048828125, 4.275202171119132],
            [-2.5048828125, 3.8916575492899987],
        ]],
}

# Bbox assembled from the polygon's first two vertices.
# NOTE(review): vertices [0][0] and [0][1] share the same latitude, so
# this is a zero-height bbox (ymin == ymax) — confirm that is intentional.
ARBITRARY_BBOX: List[float] = [
    ARBITRARY_GEOM["coordinates"][0][0][0],
    ARBITRARY_GEOM["coordinates"][0][0][1],
    ARBITRARY_GEOM["coordinates"][0][1][0],
    ARBITRARY_GEOM["coordinates"][0][1][1],
]

# Extent derived from the polygon above; temporal extent is "from now".
ARBITRARY_EXTENT = Extent(
    spatial=SpatialExtent.from_coordinates(ARBITRARY_GEOM["coordinates"]),
    temporal=TemporalExtent.from_now(),
)


class ExampleInfo:
    """Metadata record describing one example STAC document used in tests."""

    def __init__(
        self,
        path: str,
        object_type: pystac.STACObjectType,
        stac_version: str,
        extensions: List[str],
        valid: bool,
    ) -> None:
        # NOTE(review): truncated in this view — assignments for
        # `extensions` and `valid` presumably follow beyond the chunk.
        self.path = path
        self.object_type = object_type
        self.stac_version = stac_version
# Collection-level metadata for the JRC Global Monthly Water subset.
# BUGFIX: the original wrapped this string in parentheses with a trailing
# comma, producing a 1-tuple; Collection's description must be a plain str.
collection_description = (
    "JRC Global Monthly Water around the Mississippi river system"
)
collection_title = "Global Monthly Water: Mississippi river system"

# BUGFIX: STAC bboxes are [west, south, east, north] (lon/lat order); the
# original listed latitudes first (29…42 are Mississippi latitudes,
# -92…-88 the longitudes), i.e. lat/lon swapped.
spatial_extent = SpatialExtent([[
    -92.72807246278022,  # west
    29.038948834106055,  # south
    -88.02592402528022,  # east
    42.55475543734189,   # north
]])

# The JRC water dataset examines imagery from march, 1984 to december, 2019
start_dt = datetime.combine(date(1984, 3, 1), time.min)
end_dt = datetime.combine(date(2019, 12, 1), time.min)
collection_temporal_extent = TemporalExtent(intervals=[[start_dt, end_dt]])

collection = Collection(
    id="jrc-monthly-water-mississippi-river",
    description=collection_description,
    extent=Extent(spatial_extent, collection_temporal_extent),
    title=collection_title,
)

# List every object under the given prefix; `bucket` (the name) and
# `parsed_s3_path` come from code outside the visible chunk.
s3 = boto3.resource("s3")
bucket = s3.Bucket(bucket)
prefix = parsed_s3_path.path.lstrip("/")
filtered_objects = bucket.objects.filter(Prefix=prefix)

# NOTE: truncated here — the loop body continues beyond this view.
for obj_summary in filtered_objects:
    extension = obj_summary.key.split(".")[-1]