def test_collection_of_collection():
    """Smoke test: a StacCollection wrapping a parent Collection with one child loads cleanly."""
    # Shared spatial/temporal bounds reused by both collections.
    bbox_extent = pystac.SpatialExtent([[0, 1, 2, 3]])
    interval_extent = pystac.TemporalExtent(
        [datetime.datetime(2000, 1, 1), datetime.datetime(2000, 1, 1)])

    inner = pystac.Collection(
        'child', 'child-description',
        extent=pystac.Extent(bbox_extent, interval_extent))
    outer = pystac.Collection(
        'parent', 'parent-description',
        extent=pystac.Extent(bbox_extent, interval_extent),
    )
    outer.add_child(inner)

    # Loading the wrapped parent must not raise.
    loaded = StacCollection(outer)
    loaded._load()
def get_aviris_cog_collection(level):
    """Build the pystac Collection describing AVIRIS COG imagery for *level*.

    Raises:
        Exception: when *level* is neither of the known L1/L2 constants.
    """
    if level not in [L1, L2]:
        raise Exception(f'{level} is not a valid level.')

    # Description varies only by processing level.
    if level == L2:
        summary = 'AVIRIS L2 Refl Imagery converted to pixel-interleaved COGs'
    else:
        summary = 'AVIRIS L1 Imagery converted to pixel-interleaved COGs'

    coll = pystac.Collection(
        f'aviris-{level}-cogs',
        summary,
        pystac.Extent(
            pystac.SpatialExtent([[-180, -90, 180, 90]]),
            pystac.TemporalExtent([[
                datetime(2014, 1, 1, tzinfo=timezone.utc),
                datetime(2020, 1, 1, tzinfo=timezone.utc),
            ]]),
        ),
        stac_extensions=COG_COLLECTION_EXTENSIONS)

    # Reset auto-generated links, then attach hyperspectral band metadata.
    coll.links = []
    coll.properties = {}
    coll.properties['eo:bands'] = [
        {'name': band, 'center_wavelength': freq}
        for (band, freq) in AVIRIS_BANDS_FREQS
    ]
    coll.properties['hsi:wavelength_min'] = min(AVIRIS_FREQS)
    coll.properties['hsi:wavelength_max'] = max(AVIRIS_FREQS)
    return coll
def create_collection(seasons: List[int]) -> pystac.Collection:
    """Create the STAC Collection for NAIP data.

    Args:
        seasons (List[int]): Years (NAIP seasons) this collection represents.
    """
    # CONUS bounding box; time runs from the earliest to the latest season year.
    spatial = pystac.SpatialExtent(
        bboxes=[[-124.784, 24.744, -66.951, 49.346]])
    temporal = pystac.TemporalExtent(intervals=[[
        pystac.utils.str_to_datetime(f"{min(seasons)}-01-01T00:00:00Z"),
        pystac.utils.str_to_datetime(f"{max(seasons)}-01-01T00:00:00Z")
    ]])

    # Single 'image' asset definition advertised via the item-assets extension.
    image_asset = {
        "eo:bands": [b.properties for b in constants.NAIP_BANDS],
        "roles": ["data"],
        "title": "RGBIR COG tile",
        "type": pystac.MediaType.COG
    }

    return pystac.Collection(
        id=constants.NAIP_ID,
        description=constants.NAIP_DESCRIPTION,
        title=constants.NAIP_TITLE,
        license=constants.NAIP_LICENSE,
        providers=[constants.USDA_PROVIDER],
        extent=pystac.Extent(spatial, temporal),
        stac_extensions=['item-assets'],
        extra_fields={'item_assets': {'image': image_asset}})
def make_collection() -> pystac.Collection:
    """Build a small test Collection with the scientific extension enabled."""
    begin = datetime.datetime(2018, 8, 24)
    # timedelta positional args: days, seconds, microseconds, milliseconds, minutes.
    finish = begin + datetime.timedelta(5, 4, 3, 2, 1)

    span: List[List[Optional[datetime.datetime]]] = [[begin, finish]]
    extent = pystac.Extent(
        pystac.SpatialExtent([[-180.0, -90.0, 180.0, 90.0]]),
        pystac.TemporalExtent(span))

    result = pystac.Collection("my/thing", "desc", extent)
    result.set_self_href(URL_TEMPLATE % 2019)
    ScientificExtension.add_to(result)
    return result
def main():
    """Build the AVIRIS STAC catalog from the classic and NG flight-line CSVs and save it."""

    def unbounded_extent():
        # Placeholder extent: unknown bbox, open-ended interval from the epoch.
        return pystac.Extent(
            spatial=pystac.SpatialExtent([[None, None, None, None]]),
            temporal=pystac.TemporalExtent(
                [[datetime(1970, 1, 1, tzinfo=timezone.utc), None]]),
        )

    classic_df = AvirisClassic.as_df("aviris-flight-lines.csv")
    classic_collection = pystac.Collection(
        AvirisClassic.COLLECTION_NAME, AVIRIS_DESCRIPTION, unbounded_extent())
    stacframes.df_to(classic_collection, classic_df)

    ng_df = AvirisNg.as_df("aviris-ng-flight-lines.csv")
    ng_collection = pystac.Collection(
        AvirisNg.COLLECTION_NAME, AVIRIS_DESCRIPTION, unbounded_extent())
    stacframes.df_to(ng_collection, ng_df)

    # Normalize before validation to set all the required object links
    catalog = pystac.Catalog("aviris", AVIRIS_DESCRIPTION)
    catalog.add_child(classic_collection)
    catalog.add_child(ng_collection)
    catalog_path = "./data/catalog"
    catalog.normalize_hrefs(catalog_path)

    logger.info("Validating catalog...")
    catalog.validate_all()
    logger.info("Saving catalog to {}...".format(catalog_path))
    catalog.save(catalog_type=pystac.CatalogType.SELF_CONTAINED)
    logger.info("Done!")
def make_collection(year: int) -> pystac.Collection:
    """Build a versioned test Collection whose temporal extent ends in *year*."""
    begin = datetime.datetime(2014, 8, 10)
    finish = datetime.datetime(year, 1, 3, 4, 5)

    extent = pystac.Extent(
        pystac.SpatialExtent([[-180, -90, 180, 90]]),
        pystac.TemporalExtent([[begin, finish]]))

    result = pystac.Collection(f'my/collection/of/things/{year}', 'desc', extent)
    result.set_self_href(URL_TEMPLATE % year)
    result.ext.enable(pystac.Extensions.VERSION)
    return result
def make_collection(dbs,
                    source='NIC',
                    region='arctic',
                    year='All',
                    root_href='',
                    stacid='',
                    description='',
                    collection_license='MIT'):
    """Create a pystac.Collection of STAC items pulled from a StackDB.

    Args:
        dbs: An open StackDB instance, or the name of a database to open.
        source, region, year: Filters forwarded to StackDB.get_stac_items.
        root_href: Root href used to normalize the collection's links.
        stacid: Id of the new collection.
        description: Description of the new collection.
        collection_license: License string for the new collection.

    Returns:
        The populated pystac.Collection, or None when the database argument
        is invalid or no items match (preserving the original best-effort
        print-and-return behavior).
    """
    # Accept either an open database or a database name.
    if isinstance(dbs, str):
        db = StackDB(dbs)
    elif isinstance(dbs, StackDB):
        db = dbs
    else:
        print('Please specify a database or database name')
        return

    cis = db.get_stac_items(source=source, region=region, year=year)
    if len(cis) < 1:
        print('No data found for source {0}, region {1}, year {2}'.format(
            source, region, year))
        return

    stacs = []
    spatial_extent = []
    # Start with extreme bounds; make them offset-aware so comparisons with
    # the (assumed UTC-aware) item datetimes are valid.
    mindate = datetime.datetime.now().replace(tzinfo=pytz.UTC)
    maxdate = datetime.datetime.min.replace(tzinfo=pytz.UTC)

    for cs in cis:
        stac = pystac.stac_object_from_dict(json.loads(cs[0]))
        stacs.append(stac)
        mindate = min(mindate, stac.datetime)
        maxdate = max(maxdate, stac.datetime)
        spatial_extent = biggest_bbox(spatial_extent, stac.bbox)

    extent = pystac.Extent(
        # BUG FIX: was `pystac.pystac.SpatialExtent`, which raises
        # AttributeError at runtime — there is no `pystac.pystac` attribute.
        spatial=pystac.SpatialExtent(bboxes=[spatial_extent]),
        temporal=pystac.TemporalExtent([[mindate, maxdate]]))
    collection = pystac.Collection(id=stacid,
                                   description=description,
                                   extent=extent,
                                   license=collection_license)
    collection.add_items(stacs)
    collection.normalize_hrefs(root_href=root_href)
    return collection
def catalog(self):
    """Check if catalog exists and create it otherwise.

    Lazily builds the root STAC object on first access: any existing file at
    ``self.catalog_path`` is removed, then either a pystac.Collection (when
    ``self.with_bbox``) or a plain pystac.Catalog is created and cached in
    ``self._catalog``.
    """
    # Only build once, and only when a target path was configured.
    if self.catalog_path is not None and self._catalog is None:
        # Start from a clean slate: drop any previously written catalog file.
        if os.path.isfile(self.catalog_path):
            os.remove(self.catalog_path)
        if self.with_bbox:
            # NOTE(review): pystac.SpatialExtent conventionally takes a list
            # of bboxes ([[w, s, e, n]]) and TemporalExtent a list of
            # intervals ([[start, end]]); flat lists are passed here, and the
            # west bound 180 looks like it may be intended as -180 — confirm
            # against the pystac version in use.
            self._catalog = pystac.Collection(id="Sen2Like_catalog" if self.sid is None else self.sid,
                                              title="Sen2Like Catalog" if self.title is None else self.title,
                                              href=self.catalog_path,
                                              description="Catalog containing Sen2Like generated products",
                                              extent=pystac.Extent(pystac.SpatialExtent([180, -56, 180, 83]),
                                                                   pystac.TemporalExtent([None, None])))
        else:
            self._catalog = pystac.Catalog(id="Sen2Like_catalog" if self.sid is None else self.sid,
                                           title="Sen2Like Catalog" if self.title is None else self.title,
                                           href=self.catalog_path,
                                           description="Catalog containing Sen2Like generated products")
    return self._catalog
def get_planet_cog_collection(num_bands: int = 4):
    """Build the pystac Collection for Planet COG imagery with *num_bands* bands."""
    coll = pystac.Collection(
        planet_cog_collection_id(num_bands),
        f'Planet Imagery: {source_collection_id(num_bands)}',
        pystac.Extent(
            pystac.SpatialExtent([[-180, -90, 180, 90]]),
            pystac.TemporalExtent([[
                datetime(1307, 10, 13, tzinfo=timezone.utc),
                datetime(2063, 4, 5, tzinfo=timezone.utc),
            ]]),
        ),
        stac_extensions=COG_COLLECTION_EXTENSIONS)

    # Reset auto-generated links before attaching custom properties.
    coll.links = []
    coll.properties = {}

    # 4-band products use every band except PLANET_BANDS[3]; 5-band uses all.
    # Any other band count leaves 'eo:bands' unset.
    band_sets = {
        4: PLANET_BANDS[:3] + PLANET_BANDS[4:5],
        5: PLANET_BANDS,
    }
    if num_bands in band_sets:
        coll.properties['eo:bands'] = band_sets[num_bands]

    coll.properties['hsi:wavelength_min'] = 440.0
    coll.properties['hsi:wavelength_max'] = 950.0
    return coll
def main():
    """Pull Copernicus EU Rapid Mapping Activations data from the GeoRSS feed.

    Collects CEMS flood products for 2019-2020, writes them out as GeoJSON,
    matches each product against Sentinel 2 L2A scenes via SentinelHub, and
    saves the result as a self-contained STAC catalog.
    """
    # SentinelHub OAuth credentials are required; fail fast when unset.
    sentinel_oauth_id = os.environ.get("SENTINELHUB_OAUTH_ID")
    sentinel_oauth_secret = os.environ.get("SENTINELHUB_OAUTH_SECRET")
    if sentinel_oauth_id is None:
        raise ValueError("Must set SENTINELHUB_OAUTH_ID")
    if sentinel_oauth_secret is None:
        raise ValueError("Must set SENTINELHUB_OAUTH_SECRET")

    # Download the activations feed once; reuse the cached file afterwards.
    events_xml_url = "https://emergency.copernicus.eu/mapping/activations-rapid/feed"
    events_xml_file = Path("./data/copernicus-rapid-mapping-activations.xml")
    if not events_xml_file.is_file():
        logger.info("Pulling {}...".format(events_xml_url))
        urlretrieve(events_xml_url, str(events_xml_file))
    event_xml_dir = Path("./data/event-xml")
    os.makedirs(event_xml_dir, exist_ok=True)

    # Generate a list of all unique CEMS products (combination of event, aoi,
    # monitoring type, revision and version) for all flood events in 2019 and 2020
    products = []
    events_root = ET.parse(events_xml_file).getroot()
    for event in events_root.iter("item"):
        category = event.find("category").text.strip().lower()
        if category != "flood":
            continue
        event_id = event.find("guid").text
        title = event.find("title").text
        rss_url = event.find("{http://www.iwg-sem.org/}activationRSS").text
        logger.info(title)
        description = event.find("description").text
        # The event timestamp is embedded in the HTML description text;
        # exactly one match is expected.
        event_dts = re.findall(
            r"Date\/Time of Event \(UTC\):[</b>\s]*?(\d{4}-\d{1,2}-\d{1,2} \d{1,2}:\d{2}:\d{2})",
            description,
            flags=re.MULTILINE,
        )
        if len(event_dts) != 1:
            logger.warning("{}: Available event date times {}".format(
                title, event_dts))
            raise AssertionError()
        event_datetime = datetime.strptime(
            event_dts[0], "%Y-%m-%d %H:%M:%S").replace(tzinfo=timezone.utc)
        # Only keep events from 2019 onward.
        if event_datetime < datetime(2019, 1, 1, 0, 0, 0, tzinfo=timezone.utc):
            continue
        event_country = event.find(
            "{http://www.iwg-sem.org/}activationAffectedCountries").text
        # Cache each event's own GeoRSS feed on disk.
        event_xml_file = Path(event_xml_dir, event_id).with_suffix(".xml")
        if not event_xml_file.is_file():
            logger.info("\tPulling {} GeoRSS: {}...".format(
                event_id, event_xml_file))
            urlretrieve(rss_url, event_xml_file)
        event_root = ET.parse(event_xml_file).getroot()
        for item in event_root.iter("item"):
            # cemsctype / cemsptype elements may be absent from an item.
            try:
                data_type = item.find("{http://www.gdacs.org/}cemsctype").text
            except AttributeError:
                data_type = ""
            try:
                product_type = item.find(
                    "{http://www.gdacs.org/}cemsptype").text
            except AttributeError:
                product_type = ""
            # Only care about downloading VECTOR data for Delineation product
            # More info at https://emergency.copernicus.eu/mapping/ems/rapid-mapping-portfolio
            if not (data_type == "VECTOR" and
                    (product_type == "DEL" or product_type == "GRA")):
                continue
            item_url = urlparse(item.find("link").text)
            _, _, product_id, version_id = item_url.path.lstrip("/").split("/")
            (
                product_event_id,
                aoi_id,
                product_type_id,
                monitoring_type,
                revision_id,
                data_type_id,
            ) = product_id.split("_")
            # Some sanity checks to ensure we've parsed our product id string correctly
            assert event_id == product_event_id
            assert product_type_id == product_type
            assert data_type_id == "VECTORS"
            georss_polygon = item.find(
                "{http://www.georss.org/georss}polygon").text
            # Split string, group number pairs, convert to float and swap pairs to lon first
            polygon = Polygon(
                map(
                    lambda x: (float(x[1]), float(x[0])),
                    grouper(georss_polygon.split(" "), 2),
                ))
            event_product = EventProduct(
                # Rebuild product_id from scratch because we need to include version
                "_".join([
                    event_id,
                    aoi_id,
                    product_type_id,
                    monitoring_type,
                    revision_id,
                    version_id,
                    data_type_id,
                ]),
                event_id,
                event_country,
                aoi_id,
                event_datetime.timestamp(),
                polygon,
                data_type_id,
                product_type_id,
                monitoring_type,
                revision_id,
                version_id,
                urlunparse(item_url),
            )
            products.append(event_product)

    # Persist the collected product list as GeoJSON for inspection/reuse.
    df = gpd.GeoDataFrame(products)
    geojson_file = "./data/cems-rapid-mapping-flood-products-2019-2020.geojson"
    logger.info(
        "Writing GeoJSON of flood event products to {}".format(geojson_file))
    df.to_file(geojson_file, driver="GeoJSON")

    sentinel_session = get_session(sentinel_oauth_id, sentinel_oauth_secret)
    catalog = pystac.Catalog(
        "copernicus-rapid-mapping-floods-2019-2020",
        "Copernicus Rapid Mapping provisions geospatial information within hours or days from the activation in support of emergency management activities immediately following a disaster. Standardised mapping products are provided: e.g. to ascertain the situation before the event (reference product), to roughly identify and assess the most affected locations (first estimate product), assess the geographical extent of the event (delineation product) or to evaluate the intensity and scope of the damage resulting from the event (grading product). This catalog contains a subset of products for flood events from 2019-2020 that intersect with Sentinel 2 L2A Chips.",
        title="Copernicus Rapid Mapping Floods 2019-2020",
    )
    s2_collection = pystac.Collection(
        "Sentinel-2-L2A",
        "Sentinel 2 L2A images corresponding to CEMS rapid mapping floods",
        pystac.Extent(
            pystac.SpatialExtent([None, None, None, None]),
            pystac.TemporalExtent([(
                # TODO: Make this more specific by looping actual dts
                # after ingest
                datetime(2019, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
                datetime(2020, 12, 31, 23, 59, 59, tzinfo=timezone.utc),
            )]),
        ),
    )
    catalog.add_child(s2_collection)

    # Loop Products grouped by event id, lookup Sentinel 2 matches for each
    # Product, and create STAC Items in catalog for any matches
    sorted_products = sorted(products, key=lambda x: x.event_id)
    for event_id, event_products in groupby(sorted_products,
                                            key=lambda x: x.event_id):
        for p in event_products:
            event_datetime = datetime.fromtimestamp(p.event_time,
                                                    tz=timezone.utc)
            # Check for sentinel 2 results before anything else, so we
            # don't do unnecessary work. We'll use these results later
            # after we've created our STAC Item
            response = stac_search(
                p.geometry.bounds,
                "sentinel-2-l2a",
                event_datetime - timedelta(hours=12),
                event_datetime + timedelta(hours=12),
                sentinel_session,
            ).json()
            if len(response["features"]) < 1:
                logger.debug("No Sentinel 2 results for {}".format(
                    p.product_id))
                continue
            # One sub-collection per event id; created lazily on first product.
            event_collection = catalog.get_child(event_id)
            if event_collection is None:
                event_collection = pystac.Collection(
                    event_id,
                    "",
                    pystac.Extent(
                        pystac.SpatialExtent([None, None, None, None]),
                        pystac.TemporalExtent([(event_datetime, None)]),
                    ),
                )
                catalog.add_child(event_collection)
            pystac_item = pystac.Item(
                p.product_id,
                mapping(p.geometry),
                p.geometry.bounds,
                event_datetime,
                properties={
                    "aoi_id": p.aoi_id,
                    "country": p.event_country,
                    "event_id": p.event_id,
                    "product_type": p.product_type,
                    "data_type": p.data_type,
                    "monitoring_type": p.monitoring_type,
                    "revision": p.revision,
                    "version": p.version,
                },
            )
            event_collection.add_item(pystac_item)
            url_link = pystac.Link("alternate",
                                   p.product_link,
                                   media_type="text/html")
            pystac_item.add_link(url_link)
            # Get or create Item in S2 collection for each match from
            # SentinelHub and add as links to our Product Item
            for feature in response["features"]:
                s2_item = s2_collection.get_item(feature["id"])
                if s2_item is None:
                    s2_item = pystac.Item.from_dict(feature)
                    s2_collection.add_item(s2_item)
                s2_link = pystac.Link(
                    "data", s2_item,
                    link_type=pystac.LinkType.RELATIVE).set_owner(pystac_item)
                pystac_item.add_link(s2_link)
            logger.info("Created STAC Item {} with {} Sentinel 2 links".format(
                p.product_id, len(response["features"])))

    # Set spatial extents
    for collection in catalog.get_children():
        if not isinstance(collection, pystac.Collection):
            continue
        bounds = GeometryCollection(
            [shape(s.geometry) for s in collection.get_all_items()]).bounds
        collection.extent.spatial = pystac.SpatialExtent(bounds)

    catalog_root = "./data/catalog"
    logger.info("Writing STAC Catalog to {}...".format(catalog_root))
    catalog.normalize_and_save(catalog_root, pystac.CatalogType.SELF_CONTAINED)
# NOTE(review): this chunk begins mid-statement — the opening of the call that
# supplies this `asset=` keyword (presumably an item1.add_asset(...) call)
# lies outside the visible source.
asset=stac.Asset(href=thumbpath1, media_type=stac.MediaType.PNG))
# Register the second item's data, metadata and thumbnail assets.
item2.add_asset(key='data',
                asset=stac.Asset(href=path2, media_type=stac.MediaType.COG))
# NOTE(review): the metadata asset reuses metapath1 — looks like it may have
# been meant to be a second metadata path; confirm intent.
item2.add_asset(key='metadata',
                asset=stac.Asset(href=metapath1, media_type=stac.MediaType.XML))
item2.add_asset(key='thumbnail',
                asset=stac.Asset(href=thumbpath2, media_type=stac.MediaType.PNG))

## Temporal and Spatial Extent
# The collection interval is the sorted pair of the two item datetimes, so the
# earlier one always comes first.
collection_interval = sorted([item1.datetime, item2.datetime])
temporal_extent = stac.TemporalExtent(intervals=[collection_interval])
spatial_extent = stac.SpatialExtent(bboxes=[bbox1, bbox2])
collection_extent = stac.Extent(spatial=spatial_extent, temporal=temporal_extent)

# Bundle both items into a collection and attach it under the root catalog.
collection = stac.Collection(id='static-maps',
                             description='Collection of Static GIS Maps',
                             extent=collection_extent,
                             license='CC-BY-SA-4.0')
collection.add_items([item1, item2])
catalog.add_child(collection)

# Print the catalog tree, then write it out with relative hrefs.
catalog.describe()
catalog.normalize_hrefs(os.path.join(catalog_dir, 'stac'))
catalog.save(catalog_type=stac.CatalogType.RELATIVE_PUBLISHED)
# STAC extension schema URLs applied to COG collections.
COG_COLLECTION_EXTENSIONS = [
    'https://stac-extensions.github.io/eo/v1.0.0/schema.json',
    'https://github.com/azavea/nasa-hyperspectral/tree/master/docs/stac/hsi/json-schema/schema.json'
]
# Items additionally carry the projection extension.
COG_ITEM_EXTENSIONS = COG_COLLECTION_EXTENSIONS + \
    ['https://stac-extensions.github.io/projection/v1.0.0/schema.json']
# Module-level Collection describing PRISMA imagery converted to COGs, with a
# whole-globe bbox and a fixed 2014-2020 temporal extent.
PRISMA_COG_COLLECTION = pystac.Collection(
    "prisma-cogs",
    "PRISMA Imagery converted to pixel-interleaved COGs",
    pystac.Extent(
        pystac.SpatialExtent([[-180, -90, 180, 90]]),
        pystac.TemporalExtent([[
            datetime(2014, 1, 1, tzinfo=timezone.utc),
            datetime(2020, 1, 1, tzinfo=timezone.utc),
        ]]),
    ),
    stac_extensions=COG_COLLECTION_EXTENSIONS)
# Reset auto-generated links/properties so they can be filled in explicitly.
PRISMA_COG_COLLECTION.links = []
PRISMA_COG_COLLECTION.properties = {}


# https://directory.eoportal.org/web/eoportal/satellite-missions/p/prisma-hyperspectral
# NOTE(review): this definition is truncated at the end of the visible chunk —
# the json.dump payload continues beyond this view.
def activation_output(item_id: str):
    with open('/tmp/activator-output.json', 'w') as outfile:
        json.dump(
            {
                'sourceCollectionId': PRISMA_COG_COLLECTION.id,