def test_case_3():
    """Builds a small catalog with one imagery item and one label item
    whose source is the imagery item's 'ortho' asset."""
    catalog = Catalog(id='test3',
                      description='test case 3 catalog',
                      title='test case 3 title')

    imagery = Item(id='imagery-item',
                   geometry=RANDOM_GEOM,
                   bbox=RANDOM_BBOX,
                   datetime=datetime.utcnow(),
                   properties={})
    imagery.add_asset(
        'ortho', Asset(href='some/geotiff.tiff',
                       media_type=MediaType.GEOTIFF))

    counts = [LabelCount('one', 1), LabelCount('two', 2)]
    overviews = [LabelOverview('label', counts=counts)]

    labels = LabelItem(id='label-items',
                       geometry=RANDOM_GEOM,
                       bbox=RANDOM_BBOX,
                       datetime=datetime.utcnow(),
                       properties={},
                       label_description='ML Labels',
                       label_type='vector',
                       label_properties=['label'],
                       label_classes=[
                           LabelClasses(classes=['one', 'two'], name='label')
                       ],
                       label_tasks=['classification'],
                       label_methods=['manual'],
                       label_overviews=overviews)
    labels.add_source(imagery, assets=['ortho'])

    catalog.add_item(imagery)
    catalog.add_item(labels)
    return catalog
def test_full_copy_2(self):
    """Round-trips a catalog with an imagery item and a label item through
    full_copy() and checks both the source and the copy save correctly."""
    with TemporaryDirectory() as tmp_dir:
        catalog = Catalog(id='test', description='test catalog')

        imagery = Item(id='Imagery',
                       geometry=RANDOM_GEOM,
                       bbox=RANDOM_BBOX,
                       datetime=datetime.utcnow(),
                       properties={})
        for asset_key in ('ortho', 'dsm'):
            imagery.add_asset(
                asset_key,
                Asset(href='some/{}.tif'.format(asset_key),
                      media_type=MediaType.GEOTIFF))

        labels = LabelItem(
            id='Labels',
            geometry=RANDOM_GEOM,
            bbox=RANDOM_BBOX,
            datetime=datetime.utcnow(),
            properties={},
            label_description='labels',
            label_type='vector',
            label_properties='label',
            label_classes=[LabelClasses(classes=['one', 'two'], name='label')],
            label_tasks=['classification'])
        labels.add_source(imagery, assets=['ortho'])

        catalog.add_items([imagery, labels])

        catalog.normalize_hrefs(
            os.path.join(tmp_dir, 'catalog-full-copy-2-source'))
        catalog.save(catalog_type=CatalogType.ABSOLUTE_PUBLISHED)

        copied = catalog.full_copy()
        copied.normalize_hrefs(
            os.path.join(tmp_dir, 'catalog-full-copy-2-dest'))
        copied.save(catalog_type=CatalogType.ABSOLUTE_PUBLISHED)

        self.check_catalog(catalog, 'source')
        self.check_catalog(copied, 'dest')
def fix_item(item: Item, strategy: Strategy) -> None:
    """Modifies an item in-place to deal with antimeridian issues.

    If the item's geometry is not a `Polygon`, raises a `ValueError`.

    Args:
        item (pystac.Item): The item to be modified.
        strategy (Strategy): How to fix the geometry. NORMALIZE rewrites the
            polygon via `normalize`; SPLIT cuts it at the antimeridian via
            `split` into a multi-part geometry.
    """
    geometry = shapely.geometry.shape(item.geometry)
    if not isinstance(geometry, Polygon):
        raise ValueError(
            f"Can only fix antimeridian issues for Polygons, geometry={geometry}"
        )
    if strategy == Strategy.NORMALIZE:
        normalized_geometry = normalize(geometry)
        # A falsy result means nothing needed fixing; leave the item as-is.
        if normalized_geometry:
            bbox = normalized_geometry.bounds
            item.geometry = shapely.geometry.mapping(normalized_geometry)
            item.bbox = bbox
    elif strategy == Strategy.SPLIT:
        split_geometry = split(geometry)
        if split_geometry:
            # Compute the antimeridian-crossing west/east bbox edges: the
            # largest minimum-x among the parts becomes the west value and
            # the smallest maximum-x becomes the east value, so the result
            # deliberately has west > east.
            xmin = 180
            xmax = -180
            for geom in split_geometry.geoms:
                if geom.bounds[0] > xmax:
                    xmax = geom.bounds[0]
                if geom.bounds[2] < xmin:
                    xmin = geom.bounds[2]
            bounds = split_geometry.bounds
            # A bbox spanning the antimeridian is expressed with west > east:
            # https://datatracker.ietf.org/doc/html/rfc7946#section-5.2
            item.bbox = [xmax, bounds[1], xmin, bounds[3]]
            item.geometry = shapely.geometry.mapping(split_geometry)
def test_null_datetime(self) -> None:
    """A null item datetime is rejected unless start/end datetimes exist."""
    item = pystac.Item.from_file(
        TestCases.get_path("data-files/item/sample-item.json"))

    # With no start/end datetimes, datetime=None must raise.
    with self.assertRaises(pystac.STACError):
        Item(
            "test",
            geometry=item.geometry,
            bbox=item.bbox,
            datetime=None,
            properties={},
        )

    # With both range endpoints provided, datetime=None validates.
    dt_str = datetime_to_str(get_opt(item.datetime))
    null_dt_item = Item(
        "test",
        geometry=item.geometry,
        bbox=item.bbox,
        datetime=None,
        properties={
            "start_datetime": dt_str,
            "end_datetime": dt_str,
        },
    )
    null_dt_item.validate()
def item(href: str,
         read_href_modifier: Optional[ReadHrefModifier] = None) -> Item:
    """Creates a STAC Item from the asset at the provided href.

    The `read_href_modifier` argument can be used to modify the href for the
    rasterio read, e.g. if you need to sign a url.

    This function is intentionally minimal in its signature and capabilities.
    If you need to customize your Item, do so after creation.

    This function sets:
    - id (the href's file name without its extension)
    - geometry (the dataset's bounds reprojected to EPSG:4326)
    - bbox
    - datetime (to the time of item creation): you'll probably want to change this
    - the proj extension
        - either the EPSG code or, if not available, the WKT2
        - transform
        - shape
    - a single asset with key 'data'
        - asset href
        - asset roles to ['data']

    In particular, the datetime and asset media type fields most likely need
    to be updated.

    Args:
        href (str): HREF of the raster asset to read.
        read_href_modifier (Optional[ReadHrefModifier]): Optional transform
            applied to the href before the rasterio read (e.g. URL signing).

    Returns:
        Item: The created item with the projection extension applied.
    """
    id = os.path.splitext(os.path.basename(href))[0]
    if read_href_modifier:
        modified_href = read_href_modifier(href)
    else:
        modified_href = href
    # Read only header-level metadata from the dataset.
    with rasterio.open(modified_href) as dataset:
        crs = dataset.crs
        proj_bbox = dataset.bounds
        proj_transform = list(dataset.transform)[0:6]
        proj_shape = dataset.shape
    # Footprint = dataset bounding box in native CRS, reprojected to WGS84.
    proj_geometry = shapely.geometry.mapping(shapely.geometry.box(*proj_bbox))
    geometry = stactools.core.projection.reproject_geom(crs,
                                                        'EPSG:4326',
                                                        proj_geometry,
                                                        precision=6)
    bbox = list(shapely.geometry.shape(geometry).bounds)
    item = Item(id=id,
                geometry=geometry,
                bbox=bbox,
                datetime=datetime.datetime.now(),
                properties={})

    projection = ProjectionExtension.ext(item, add_if_missing=True)
    epsg = crs.to_epsg()
    if epsg:
        projection.epsg = epsg
    else:
        # No EPSG code could be derived; fall back to the WKT2 string.
        projection.wkt2 = crs.to_wkt('WKT2')
    projection.transform = proj_transform
    projection.shape = proj_shape

    item.add_asset('data', Asset(href=href, roles=['data']))

    return item
def test_eo_items_are_heritable(self):
    """Items inherit common EO properties from their parent collection
    (via the 'commons' extension) after a save/load round trip."""
    item1 = Item(id='test-item-1',
                 geometry=RANDOM_GEOM,
                 bbox=RANDOM_BBOX,
                 datetime=datetime.utcnow(),
                 properties={'key': 'one'},
                 stac_extensions=['eo', 'commons'])

    item2 = Item(id='test-item-2',
                 geometry=RANDOM_GEOM,
                 bbox=RANDOM_BBOX,
                 datetime=datetime.utcnow(),
                 properties={'key': 'two'},
                 stac_extensions=['eo', 'commons'])

    # WorldView-3 band definitions, stored once on the collection and
    # shared by both child items.
    wv3_bands = [
        Band.create(name='Coastal',
                    description='Coastal: 400 - 450 nm',
                    common_name='coastal'),
        Band.create(name='Blue',
                    description='Blue: 450 - 510 nm',
                    common_name='blue'),
        Band.create(name='Green',
                    description='Green: 510 - 580 nm',
                    common_name='green'),
        Band.create(name='Yellow',
                    description='Yellow: 585 - 625 nm',
                    common_name='yellow'),
        Band.create(name='Red',
                    description='Red: 630 - 690 nm',
                    common_name='red'),
        Band.create(name='Red Edge',
                    description='Red Edge: 705 - 745 nm',
                    common_name='rededge'),
        Band.create(name='Near-IR1',
                    description='Near-IR1: 770 - 895 nm',
                    common_name='nir08'),
        Band.create(name='Near-IR2',
                    description='Near-IR2: 860 - 1040 nm',
                    common_name='nir09')
    ]

    spatial_extent = SpatialExtent(bboxes=[RANDOM_BBOX])
    temporal_extent = TemporalExtent(intervals=[[item1.datetime, None]])
    collection_extent = Extent(spatial=spatial_extent,
                               temporal=temporal_extent)

    # Heritable properties defined on the collection, not on the items.
    common_properties = {
        'eo:bands': [b.to_dict() for b in wv3_bands],
        'gsd': 0.3,
        'eo:platform': 'Maxar',
        'eo:instrument': 'WorldView3'
    }

    collection = Collection(id='test',
                            description='test',
                            extent=collection_extent,
                            properties=common_properties,
                            stac_extensions=['commons'],
                            license='CC-BY-SA-4.0')
    collection.add_items([item1, item2])

    with TemporaryDirectory() as tmp_dir:
        collection.normalize_hrefs(tmp_dir)
        collection.save(catalog_type=CatalogType.SELF_CONTAINED)

        read_col = Collection.from_file('{}/collection.json'.format(tmp_dir))
        items = list(read_col.get_all_items())

        # Both items should come back implementing the eo extension even
        # though the EO properties lived only on the collection.
        self.assertEqual(len(items), 2)
        self.assertTrue(items[0].ext.implements('eo'))
        self.assertTrue(items[1].ext.implements('eo'))
def test_update_extents(self) -> None:
    """update_extent_from_items recomputes a collection's spatial and
    temporal extents as items are added and removed."""
    catalog = TestCases.test_case_2()
    base_collection = catalog.get_child(
        "1a8c1632-fa91-4a62-b33e-3a87c2ebdf16")
    assert isinstance(base_collection, Collection)
    base_extent = base_collection.extent
    collection = base_collection.clone()

    # Item with a concrete datetime and a global bbox.
    item1 = Item(
        id="test-item-1",
        geometry=ARBITRARY_GEOM,
        bbox=[-180, -90, 180, 90],
        datetime=TEST_DATETIME,
        properties={"key": "one"},
        stac_extensions=["eo", "commons"],
    )

    # Item with a null datetime but an explicit start/end datetime range.
    item2 = Item(
        id="test-item-1",
        geometry=ARBITRARY_GEOM,
        bbox=[-180, -90, 180, 90],
        datetime=None,
        properties={
            "start_datetime":
            datetime_to_str(datetime(2000, 1, 1, 12, 0, 0, 0)),
            "end_datetime":
            datetime_to_str(datetime(2000, 2, 1, 12, 0, 0, 0)),
        },
        stac_extensions=["eo", "commons"],
    )

    collection.add_item(item1)
    collection.update_extent_from_items()
    # item1's global bbox becomes the collection's spatial extent.
    self.assertEqual([[-180, -90, 180, 90]], collection.extent.spatial.bboxes)
    self.assertEqual(len(base_extent.spatial.bboxes[0]),
                     len(collection.extent.spatial.bboxes[0]))
    self.assertNotEqual(base_extent.temporal.intervals,
                        collection.extent.temporal.intervals)

    collection.remove_item("test-item-1")
    collection.update_extent_from_items()
    # With item1 gone, the extent should shrink away from the global bbox.
    self.assertNotEqual([[-180, -90, 180, 90]],
                        collection.extent.spatial.bboxes)

    collection.add_item(item2)
    collection.update_extent_from_items()
    # The interval start comes from item2's start_datetime; the interval
    # end keeps the base collection's original value.
    self.assertEqual(
        [[
            item2.common_metadata.start_datetime,
            base_extent.temporal.intervals[0][1],
        ]],
        collection.extent.temporal.intervals,
    )
def test_from_file(self):
    """Label examples read from file keep their overview counts and validate."""
    for uri in (self.label_example_1_uri, self.label_example_2_uri):
        example = Item.from_file(uri)
        self.assertEqual(len(example.ext.label.label_overviews[0].counts), 2)
        example.validate()
def _add_cog_assets(
        item: pystac.Item,
        xml_metadata: XmlMetadata,
        vnir_cog_href: Optional[str],
        swir_cog_href: Optional[str],
        tir_cog_href: Optional[str],
        read_href_modifier: Optional[ReadHrefModifier] = None) -> None:
    """Adds one COG asset per ASTER sensor (VNIR/SWIR/TIR) to the item.

    Sensors whose href is None are skipped with a warning. For each added
    asset, the EO bands, the view off-nadir angle (when a pointing angle is
    available), and the per-asset projection shape/bbox/transform (read from
    the COG headers) are set. The item is mutated in place.

    Args:
        item (pystac.Item): The item to receive the assets.
        xml_metadata (XmlMetadata): Source of the per-sensor pointing angles.
        vnir_cog_href (Optional[str]): HREF of the VNIR COG, if any.
        swir_cog_href (Optional[str]): HREF of the SWIR COG, if any.
        tir_cog_href (Optional[str]): HREF of the TIR COG, if any.
        read_href_modifier (Optional[ReadHrefModifier]): Optional transform
            applied to an href before reading it (e.g. URL signing).
    """
    pointing_angles = xml_metadata.pointing_angles

    sensors_to_hrefs = {
        VNIR_SENSOR: vnir_cog_href,
        SWIR_SENSOR: swir_cog_href,
        TIR_SENSOR: tir_cog_href
    }

    def title_for(sensor):
        # Human-readable asset title for a sensor key.
        return f'{sensor} Swath data'

    sensors_to_bands = get_sensors_to_bands()
    for sensor in ASTER_SENSORS:
        if sensors_to_hrefs[sensor] is None:
            logger.warning(f'Skipping {sensor} COG')
            continue
        cog_href = sensors_to_hrefs[sensor]
        sensor_asset = pystac.Asset(href=cog_href,
                                    media_type=pystac.MediaType.COG,
                                    roles=['data'],
                                    title=title_for(sensor))

        # Set bands
        item.ext.eo.set_bands(sensors_to_bands[sensor], sensor_asset)

        # Set view off_nadir. NOTE(review): this writes item-level view
        # metadata once per sensor, so the last sensor with a pointing
        # angle wins — confirm this is intended.
        if sensor in pointing_angles:
            item.ext.view.off_nadir = abs(pointing_angles[sensor])

        # Open COG headers to get proj info
        cog_read_href = cog_href
        if read_href_modifier:
            cog_read_href = read_href_modifier(cog_read_href)

        with rio.open(cog_read_href) as ds:
            image_shape = list(ds.shape)
            proj_bbox = list(ds.bounds)
            transform = list(ds.transform)

            item.ext.projection.set_shape(image_shape, sensor_asset)
            item.ext.projection.set_bbox(proj_bbox, sensor_asset)
            item.ext.projection.set_transform(transform, sensor_asset)

        item.add_asset(sensor, sensor_asset)
def test_case_3() -> Catalog:
    """Builds a small catalog holding one imagery item and one label item
    whose label source is the imagery's 'ortho' asset."""
    catalog = Catalog(id="test3",
                      description="test case 3 catalog",
                      title="test case 3 title")

    imagery = Item(
        id="imagery-item",
        geometry=ARBITRARY_GEOM,
        bbox=ARBITRARY_BBOX,
        datetime=datetime.utcnow(),
        properties={},
    )
    imagery.add_asset(
        "ortho", Asset(href="some/geotiff.tiff",
                       media_type=MediaType.GEOTIFF))

    label_counts = [LabelCount.create("one", 1), LabelCount.create("two", 2)]
    overviews = [LabelOverview.create("label", counts=label_counts)]

    labels = Item(
        id="label-items",
        geometry=ARBITRARY_GEOM,
        bbox=ARBITRARY_BBOX,
        datetime=datetime.utcnow(),
        properties={},
    )
    LabelExtension.add_to(labels)
    label_ext = LabelExtension.ext(labels)
    label_ext.apply(
        label_description="ML Labels",
        label_type=LabelType.VECTOR,
        label_properties=["label"],
        label_classes=[
            LabelClasses.create(classes=["one", "two"], name="label")
        ],
        label_tasks=["classification"],
        label_methods=["manual"],
        label_overviews=overviews,
    )
    label_ext.add_source(imagery, assets=["ortho"])

    catalog.add_item(imagery)
    catalog.add_item(labels)
    return catalog
def merge_items(source_item: pystac.Item,
                target_item: pystac.Item,
                move_assets: bool = False,
                ignore_conflicts: bool = False) -> None:
    """Merges the assets from source_item into target_item.

    The geometry and bounding box of the items will also be merged.

    Args:
        source_item (pystac.Item): The Item that will be merged into target_item.
            This item is not mutated in this operation.
        target_item (pystac.Item): The target item that will be merged into.
            This item will be mutated in this operation.
        move_assets (bool): If true, move the asset files alongside the target item.
        ignore_conflicts (bool): If True, assets with the same keys will not be merged,
            and asset files that would be moved to overwrite an existing file
            will not be moved. If False, either of these situations will throw
            an error.

    Raises:
        ValueError: If the target item has no self HREF, or a source asset
            has no absolute HREF.
        Exception: On an asset key conflict when ignore_conflicts is False.
    """
    target_item_href = target_item.get_self_href()
    if target_item_href is None:
        raise ValueError(
            f"Target Item {target_item.id} must have an HREF for merge")
    for key, asset in source_item.assets.items():
        if key in target_item.assets:
            if ignore_conflicts:
                # Conflict: keep the target's existing asset untouched.
                continue
            else:
                raise Exception(
                    'Target item {} already has asset with key {}, '
                    'cannot merge asset in from {}'.format(
                        target_item, key, source_item))
        else:
            asset_href = asset.get_absolute_href()
            if asset_href is None:
                raise ValueError(
                    f"Asset {asset.title} must have an HREF for merge")
            if move_assets:
                # Physically relocate the asset file next to the target item.
                new_asset_href = move_asset_file_to_item(
                    target_item, asset_href, ignore_conflicts=ignore_conflicts)
            else:
                # Leave the file in place; if the source href was relative,
                # re-relativize the absolute href against the target item.
                if not is_absolute_href(asset.href):
                    asset_href = make_relative_href(asset_href,
                                                    target_item_href)
                new_asset_href = asset_href
            new_asset = asset.clone()
            new_asset.href = new_asset_href
            target_item.add_asset(key, new_asset)

    # Union the footprints; buffer(0) cleans up an invalid union result.
    source_geom = shape(source_item.geometry)
    target_geom = shape(target_item.geometry)
    union_geom = source_geom.union(target_geom).buffer(0)
    target_item.geometry = mapping(union_geom)
    target_item.bbox = list(union_geom.bounds)
def test_from_items(self) -> None:
    """Extent.from_items merges bboxes and datetime ranges across items."""
    range_start = datetime(2000, 1, 1, 12, 0, 0, 0, tzinfo=tz.UTC)
    range_end = datetime(2001, 1, 1, 12, 0, 0, 0, tzinfo=tz.UTC)

    item1 = Item(
        id="test-item-1",
        geometry=ARBITRARY_GEOM,
        bbox=[-10, -20, 0, -10],
        datetime=datetime(2000, 2, 1, 12, 0, 0, 0, tzinfo=tz.UTC),
        properties={},
    )
    item2 = Item(
        id="test-item-2",
        geometry=ARBITRARY_GEOM,
        bbox=[0, -9, 10, 1],
        datetime=None,
        properties={
            "start_datetime": datetime_to_str(range_start),
            "end_datetime": datetime_to_str(
                datetime(2000, 7, 1, 12, 0, 0, 0, tzinfo=tz.UTC)),
        },
    )
    item3 = Item(
        id="test-item-2",
        geometry=ARBITRARY_GEOM,
        bbox=[-5, -20, 5, 0],
        datetime=None,
        properties={
            "start_datetime": datetime_to_str(
                datetime(2000, 12, 1, 12, 0, 0, 0, tzinfo=tz.UTC)),
            "end_datetime": datetime_to_str(range_end),
        },
    )

    extent = Extent.from_items([item1, item2, item3])

    # One combined bbox covering all three items.
    self.assertEqual(len(extent.spatial.bboxes), 1)
    self.assertEqual(extent.spatial.bboxes[0], [-10, -20, 10, 1])

    # One combined interval from the earliest start to the latest end.
    self.assertEqual(len(extent.temporal.intervals), 1)
    interval = extent.temporal.intervals[0]
    self.assertEqual(interval[0], range_start)
    self.assertEqual(interval[1], range_end)
def test_from_file(self) -> None:
    """Label examples read from file keep two overview counts and validate."""
    for uri in (self.label_example_1_uri, self.label_example_2_uri):
        example = Item.from_file(uri)
        overviews = get_opt(LabelExtension.ext(example).label_overviews)
        self.assertEqual(len(get_opt(overviews[0].counts)), 2)
        example.validate()
def test_update_extents(self):
    """update_extent_from_items recomputes a collection's spatial and
    temporal extents as items are added and removed."""
    catalog = TestCases.test_case_2()
    base_collection = catalog.get_child(
        '1a8c1632-fa91-4a62-b33e-3a87c2ebdf16')
    base_extent = base_collection.extent
    collection = base_collection.clone()

    # Item with a concrete datetime and a global bbox.
    item1 = Item(id='test-item-1',
                 geometry=RANDOM_GEOM,
                 bbox=[-180, -90, 180, 90],
                 datetime=TEST_DATETIME,
                 properties={'key': 'one'},
                 stac_extensions=['eo', 'commons'])

    # Item with a null datetime but an explicit start/end datetime range.
    item2 = Item(id='test-item-1',
                 geometry=RANDOM_GEOM,
                 bbox=[-180, -90, 180, 90],
                 datetime=None,
                 properties={
                     'start_datetime':
                     datetime_to_str(datetime(2000, 1, 1, 12, 0, 0, 0)),
                     'end_datetime':
                     datetime_to_str(datetime(2000, 2, 1, 12, 0, 0, 0))
                 },
                 stac_extensions=['eo', 'commons'])

    collection.add_item(item1)
    collection.update_extent_from_items()
    # item1's global bbox becomes the collection's spatial extent.
    self.assertEqual([[-180, -90, 180, 90]],
                     collection.extent.spatial.bboxes)
    self.assertEqual(len(base_extent.spatial.bboxes[0]),
                     len(collection.extent.spatial.bboxes[0]))
    self.assertNotEqual(base_extent.temporal.intervals,
                        collection.extent.temporal.intervals)

    collection.remove_item('test-item-1')
    collection.update_extent_from_items()
    # With item1 gone, the extent should shrink away from the global bbox.
    self.assertNotEqual([[-180, -90, 180, 90]],
                        collection.extent.spatial.bboxes)

    collection.add_item(item2)
    collection.update_extent_from_items()
    # The interval start comes from item2's start_datetime; the interval
    # end keeps the base collection's original value.
    self.assertEqual([[
        item2.common_metadata.start_datetime,
        base_extent.temporal.intervals[0][1]
    ]], collection.extent.temporal.intervals)
def register_item(self, source: Source, item: Item, replace: bool):
    """Generates ISO XML metadata for a STAC item and upserts it.

    Two ingestion paths are supported:
      * Product ingestion: when the item carries both 'inspire-metadata'
        and 'product-metadata' assets, both XML files are fetched from the
        source and combined into ISO XML.
      * Processing-result ingestion: otherwise, the ISO XML is generated
        from the item's own JSON representation.

    Args:
        source (Source): Provider used to fetch the metadata files.
        item (Item): The STAC item to register.
        replace (bool): Not referenced in this method; presumably consumed
            by _parse_and_upsert_metadata — TODO confirm.
    """
    logger.info('Ingesting product')
    assets = item.get_assets()
    if 'inspire-metadata' in assets and 'product-metadata' in assets:
        inspire_xml = href_to_path(assets['inspire-metadata'].href)
        esa_xml = href_to_path(assets['product-metadata'].href)
        # Fixed scratch locations for the fetched XML files.
        esa_xml_local = '/tmp/esa-metadata.xml'
        inspire_xml_local = '/tmp/inspire-metadata.xml'
        logger.info(f"ESA XML metadata file: {esa_xml}")
        logger.info(f"INSPIRE XML metadata file: {inspire_xml}")
        try:
            source.get_file(inspire_xml, inspire_xml_local)
            source.get_file(esa_xml, esa_xml_local)
        except Exception as err:
            logger.error(err)
            raise
        logger.info('Generating ISO XML based on ESA and INSPIRE XML')
        imo = ISOMetadata(os.path.dirname(inspire_xml))
        with open(esa_xml_local, 'rb') as a, open(inspire_xml_local, 'rb') as b:  # noqa
            iso_metadata = imo.from_esa_iso_xml(
                a.read(), b.read(), self.collections, self.ows_url)
        # Clean up the scratch files once the ISO XML is built.
        for tmp_file in [esa_xml_local, inspire_xml_local]:
            logger.debug(f"Removing temporary file {tmp_file}")
            os.remove(tmp_file)
    else:
        logger.info('Ingesting processing result')
        # Derive the item's base URL from its self link (parent directory).
        self_href = item.get_links('self')[0].get_absolute_href()
        parsed = urlparse(self_href)
        parsed = parsed._replace(path=os.path.dirname(parsed.path))
        base_url = urlunparse(parsed)
        logger.debug(f'base URL {base_url}')
        base_url = f's3://{base_url}'
        imo = ISOMetadata(base_url)
        iso_metadata = imo.from_stac_item(
            json.dumps(item.to_dict(transform_hrefs=False)), self.ows_url
        )
    logger.debug(f'Upserting metadata: {iso_metadata}')
    self._parse_and_upsert_metadata(iso_metadata)
def test_altered_ids_are_retained(self):
    """Item ids changed by the adapter's process step survive into the
    output catalog."""
    catalog = Catalog('0', 'Catalog 0')
    catalog.add_link(
        Link('harmony_source', 'http://example.com/C0001-EXAMPLE'))
    message = Message(full_message)
    catalog.add_items([
        Item('mutate-me', None, [0, 0, 1, 1], '2020-01-01T00:00:00.000Z', {}),
        Item('2', None, [0, 0, 1, 1], '2020-01-01T00:00:00.000Z', {}),
    ])
    adapter = AdapterTester(message, catalog, config=self.config)
    (message, out_catalog) = adapter.invoke()
    out_items = list(out_catalog.get_items())
    self.assertEqual(out_items[0].id, 'i-mutated-you')
def stac_api_to_stac(uri: str) -> dict:
    """Reads a STAC Item from `uri` and runs the STAC-to-STAC transform on
    it, keeping the source link and leaving the projection extension off."""
    return transform_stac_to_stac(Item.from_file(uri),
                                  source_link=uri,
                                  enable_proj=False)
def test_from_item(self):
    """Band data is exposed via the eo extension wrapper and the
    'eo:bands' property, not as a direct Item attribute."""
    read_item = Item.from_file(self.URI_1)
    with self.assertRaises(AttributeError):
        getattr(read_item, 'bands')
    self.assertTrue('eo:bands' in read_item.properties.keys())
    eo_ext = read_item.ext.eo
    self.assertIsNotNone(getattr(eo_ext, 'bands'))
def test_set_asset_bands(self):
    """set_asset_bands writes band references onto an asset, rejects names
    not defined on the item, and serializes as indices into the item's
    band list."""
    eo_item = pystac.read_file(
        TestCases.get_path('data-files/eo/eo-landsat-example.json'))

    b1_asset = eo_item.assets['B1']
    eo_item.ext.eo.set_asset_bands(b1_asset, ['B2'])

    # Round-trip through a dict to make sure the change serializes.
    eo_item_mod = Item.from_dict(eo_item.to_dict())
    b1_asset_mod = eo_item_mod.assets['B1']
    asset_bands = eo_item_mod.ext.eo.get_asset_bands(b1_asset_mod)
    self.assertIsNot(None, asset_bands)
    self.assertEqual(len(asset_bands), 1)
    self.assertEqual(asset_bands[0].name, 'B2')

    self.validator.validate_object(eo_item)

    # Check setting with invalid keys
    with self.assertRaises(KeyError):
        eo_item.ext.eo.set_asset_bands(b1_asset, ['BAD_KEY', 'BAD_KEY_2'])

    # Check adding a new asset
    asset = pystac.Asset(href="some/path.tif",
                         media_type=pystac.MediaType.GEOTIFF)
    eo_item.ext.eo.set_asset_bands(asset,
                                   [b.name for b in eo_item.ext.eo.bands])
    eo_item.add_asset("test", asset)

    # The asset-level 'eo:bands' property stores indices into the item's
    # band list, so using every band yields 0..n-1.
    self.assertEqual(eo_item.assets["test"].properties["eo:bands"],
                     list(range(0, len(eo_item.ext.eo.bands))))
def test_from_items(self):
    """Extent.from_items merges bboxes and datetime ranges across items."""
    range_start = datetime(2000, 1, 1, 12, 0, 0, 0, tzinfo=tz.UTC)
    range_end = datetime(2001, 1, 1, 12, 0, 0, 0, tzinfo=tz.UTC)

    item1 = Item(id='test-item-1',
                 geometry=RANDOM_GEOM,
                 bbox=[-10, -20, 0, -10],
                 datetime=datetime(2000, 2, 1, 12, 0, 0, 0, tzinfo=tz.UTC),
                 properties={})
    item2 = Item(id='test-item-2',
                 geometry=RANDOM_GEOM,
                 bbox=[0, -9, 10, 1],
                 datetime=None,
                 properties={
                     'start_datetime': datetime_to_str(range_start),
                     'end_datetime': datetime_to_str(
                         datetime(2000, 7, 1, 12, 0, 0, 0, tzinfo=tz.UTC))
                 })
    item3 = Item(id='test-item-2',
                 geometry=RANDOM_GEOM,
                 bbox=[-5, -20, 5, 0],
                 datetime=None,
                 properties={
                     'start_datetime': datetime_to_str(
                         datetime(2000, 12, 1, 12, 0, 0, 0, tzinfo=tz.UTC)),
                     'end_datetime': datetime_to_str(range_end)
                 })

    extent = Extent.from_items([item1, item2, item3])

    # One combined bbox covering all three items.
    self.assertEqual(len(extent.spatial.bboxes), 1)
    self.assertEqual(extent.spatial.bboxes[0], [-10, -20, 10, 1])

    # One combined interval from the earliest start to the latest end.
    self.assertEqual(len(extent.temporal.intervals), 1)
    interval = extent.temporal.intervals[0]
    self.assertEqual(interval[0], range_start)
    self.assertEqual(interval[1], range_end)
def to_stac(item, in_tiff):
    """Builds a STAC Item for a Sentinel-1 sigma0 backscatter (dB, VV) product.

    Copies id/geometry/bbox/datetime/properties from the source item, sets
    Sentinel-1 common metadata, and attaches a single GeoTIFF asset carrying
    an EO band definition.

    Args:
        item: Source pystac Item (the input Sentinel-1 scene).
        in_tiff: Unused; retained for backward compatibility with callers.

    Returns:
        Item: The newly constructed output item.
    """
    item_out = Item(id=item.id,
                    geometry=item.geometry,
                    bbox=item.bbox,
                    datetime=item.datetime,
                    properties=item.properties)

    item_out.common_metadata.set_gsd(20)
    item_out.common_metadata.set_constellation('sentinel-1')
    item_out.common_metadata.set_mission('sentinel-1')
    # 'S1A...' / 'S1B...' -> platform 'sentinel-1a' / 'sentinel-1b'
    item_out.common_metadata.set_platform('sentinel-1{}'.format(
        item.id[2:3].lower()))

    eo_item = extensions.eo.EOItemExt(item_out)

    # Band naming convention: <quantity>[_<units>]_<polarization>.
    # Bug fixes vs. the previous version: the band name carried a trailing
    # space (which leaked into the asset key and the asset href filename),
    # and 'sar:polarizations' used the units segment ('db') instead of the
    # polarization segment ('vv').
    band = 'sigma_db_vv'
    parts = band.split('_')
    polarization = parts[-1].upper()

    asset = Asset(href='{}_{}.tif'.format(item.id, band.upper()),
                  media_type=MediaType.GEOTIFF,
                  properties={'sar:polarizations': polarization})
    item_out.add_asset(key=band, asset=asset)

    # e.g. 'Sigma for polarization channel VV in db'
    description = '{} for polarization channel {}{}'.format(
        parts[0].title(), polarization,
        ' in {}'.format(parts[1]) if len(parts) == 3 else '')

    stac_band = extensions.eo.Band.create(name=band,
                                          common_name=band,
                                          description=description)
    eo_item.set_bands([stac_band], asset=asset)

    return item_out
def test_invocation_processes_items_with_sources(self):
    """invoke() passes each catalog item together with the message's
    source into the adapter's process step."""
    catalog = Catalog('0', 'Catalog 0')
    catalog.add_link(
        Link('harmony_source', 'http://example.com/C0001-EXAMPLE'))
    message = Message(full_message)
    items = [
        Item('1', None, [0, 0, 1, 1], '2020-01-01T00:00:00.000Z', {}),
        Item('2', None, [0, 0, 1, 2], '2020-01-01T00:00:00.000Z', {}),
    ]
    catalog.add_items(items)
    adapter = AdapterTester(message, catalog, config=self.config)
    adapter.invoke()
    # Each call received the matching item (checked via bbox) ...
    for index, expected in enumerate(items):
        self.assertEqual(AdapterTester.process_args[index][0].bbox,
                         expected.bbox)
    # ... and the single message source.
    for index in range(len(items)):
        self.assertEqual(AdapterTester.process_args[index][1],
                         message.sources[0])
def test_asset_absolute_href(self) -> None:
    """A relative asset href on an item without a self href resolves
    against the current working directory."""
    item = Item.from_dict(self.get_example_item_dict())
    rel_asset = Asset("./data.geojson")
    rel_asset.set_owner(item)
    self.assertEqual(os.path.abspath("./data.geojson"),
                     rel_asset.get_absolute_href())
def test_self_contained_item(self):
    """Dropping all links except 'self' yields a valid Item with one link."""
    item_dict = self.get_example_item_dict()
    item_dict['links'] = [
        link for link in item_dict['links'] if link['rel'] == 'self'
    ]
    parsed = Item.from_dict(item_dict)
    self.assertIsInstance(parsed, Item)
    self.assertEqual(len(parsed.links), 1)
def test_datetime_ISO8601_format(self) -> None:
    """Item datetimes serialize as ISO 8601 UTC ('Z'-suffixed) strings."""
    item = Item.from_dict(self.get_example_item_dict())
    serialized = item.to_dict()["properties"]["datetime"]
    self.assertEqual("2016-05-03T13:22:30.040000Z", serialized)
def test_asset_absolute_href(self):
    """A relative asset href resolves against the owning item's self href."""
    item = Item.from_dict(self.get_example_item_dict())
    rel_asset = Asset('./data.geojson')
    rel_asset.set_owner(item)
    self.assertEqual(
        'http://cool-sat.com/catalog/CS3-20160503_132130_04/data.geojson',
        rel_asset.get_absolute_href())
def test_items_with_no_input_source_raise_exceptions(self):
    """invoke() raises RuntimeError when an item matches no message source."""
    cat = Catalog('0', 'Catalog 0')
    cat.add_item(
        Item('1', None, [0, 0, 1, 1], '2020-01-01T00:00:00.000Z', {}))
    adapter = AdapterTester(Message(full_message), cat, config=self.config)
    with self.assertRaises(RuntimeError):
        adapter.invoke()
def test_datetime_ISO8601_format(self):
    """Item datetimes serialize as ISO 8601 UTC ('Z'-suffixed) strings."""
    item = Item.from_dict(self.get_example_item_dict())
    serialized = item.to_dict()['properties']['datetime']
    self.assertEqual('2016-05-03T13:22:30.040000Z', serialized)
def item_mapper(item: pystac.Item) -> pystac.Item:
    """Rebuilds the item's asset dict from the (key, asset) pairs produced
    by applying apply_asset_mapper to each existing asset entry."""
    remapped = {}
    for entry in item.assets.items():
        remapped.update(apply_asset_mapper(entry))
    item.assets = remapped
    return item
def test_map_items_multiple_2(self):
    """Chained map_items calls compose: a title-modifying map followed by a
    map that returns [item, label_item] doubles the item count (2 -> 4)."""
    catalog = Catalog(id='test-1', description='Test1')
    item1 = Item(id='item1',
                 geometry=RANDOM_GEOM,
                 bbox=RANDOM_BBOX,
                 datetime=datetime.utcnow(),
                 properties={})
    item1.add_asset('ortho', Asset(href='/some/ortho.tif'))
    catalog.add_item(item1)

    kitten = Catalog(id='test-kitten',
                     description='A cuter version of catalog')
    catalog.add_child(kitten)
    item2 = Item(id='item2',
                 geometry=RANDOM_GEOM,
                 bbox=RANDOM_BBOX,
                 datetime=datetime.utcnow(),
                 properties={})
    item2.add_asset('ortho', Asset(href='/some/other/ortho.tif'))
    kitten.add_item(item2)

    def modify_item_title(item):
        # One-to-one mapper: mutate the title, keep the item.
        item.title = 'Some new title'
        return item

    def create_label_item(item):
        # One-to-many mapper: returns the original item plus a new label
        # item, so each mapped item becomes two.
        # Assumes the GEOJSON labels are in the
        # same location as the image
        img_href = item.assets['ortho'].href
        label_href = '{}.geojson'.format(os.path.splitext(img_href)[0])
        label_item = Item(id='Labels',
                          geometry=item.geometry,
                          bbox=item.bbox,
                          datetime=datetime.utcnow(),
                          properties={})
        label_item.ext.enable(Extensions.LABEL)
        label_ext = label_item.ext.label
        label_ext.apply(label_description='labels',
                        label_type='vector',
                        label_properties=['label'],
                        label_classes=[
                            LabelClasses.create(classes=['one', 'two'],
                                                name='label')
                        ],
                        label_tasks=['classification'])
        label_ext.add_source(item, assets=['ortho'])
        label_ext.add_geojson_labels(label_href)

        return [item, label_item]

    c = catalog.map_items(modify_item_title)
    c = c.map_items(create_label_item)
    new_catalog = c

    items = new_catalog.get_all_items()
    # 2 original items, each mapped to [item, label_item] -> 4 items total.
    self.assertTrue(len(list(items)) == 4)