def add_item(self, path_pro: Optional[str], path_tiff: Optional[str], path_img: Optional[str]) -> None:
    """Parse a .pro file, wrap it in a STAC Item and file it under the
    per-satellite collection, creating that collection on first sight.

    Parameters
    ----------
    path_pro : str
        Path to the mandatory .pro file (must end with ".pro").
    path_tiff : Optional[str]
        Optional path to a companion GeoTIFF asset.
    path_img : Optional[str]
        Optional path to a companion IMG asset.
    """
    # path_pro is required despite the Optional annotation; fail fast on None.
    assert path_pro is not None and path_pro.endswith(".pro")
    # BUG FIX: rstrip(".pro") strips any trailing '.', 'p', 'r', 'o' chars
    # (e.g. "mapp.pro" -> "ma"); slice off the 4-char extension instead.
    file_name = path_pro.split("\\")[-1][:-len(".pro")]
    print(file_name)
    b0, data = parse(path_pro)
    item: Item = stac.create_item(i_id=file_name, metadata=b0)
    assets: List[Asset] = [Asset(href=path_pro, media_type="pro")]
    if path_tiff is not None:
        assets.append(Asset(href=path_tiff, media_type="geotiff"))
    if path_img is not None:
        assets.append(Asset(href=path_img, media_type="img"))
    stac.add_assets(item, assets)
    sat_id = str(b0["b0_common"]["satId"][0])
    catalog = self.root_catalog.get_child(sat_id)
    if catalog is None:
        sat_name = b0["b0_common"]["satName"][0].decode("utf-8")
        # TODO: Real extent — placeholder covers the whole globe with an
        # open-ended temporal interval starting 2009-01-01.
        extent = Extent(
            spatial=SpatialExtent([[-180, -90, 180, 90]]),
            temporal=TemporalExtent([[
                datetime.strptime("2009-01-01T00:00:00.000000",
                                  "%Y-%m-%dT%H:%M:%S.%f"),
                None]]))
        catalog = Collection(id=sat_id,
                             title=sat_name,
                             description=f"Catalog for satellite {sat_name}",
                             extent=extent)
        self.root_catalog.add_child(catalog, catalog.title)
    # update_collection_extent(item, catalog)
    catalog.add_item(item)
def test_asset_absolute_href(self):
    """A relative asset href resolves against the owning item's self href."""
    item = Item.from_dict(self.get_example_item_dict())
    asset = Asset('./data.geojson')
    asset.set_owner(item)
    self.assertEqual(
        'http://cool-sat.com/catalog/CS3-20160503_132130_04/data.geojson',
        asset.get_absolute_href())
def test_asset_absolute_href(self) -> None:
    """With no item self href, a relative asset href resolves against cwd."""
    item = Item.from_dict(self.get_example_item_dict())
    asset = Asset("./data.geojson")
    asset.set_owner(item)
    self.assertEqual(os.path.abspath("./data.geojson"),
                     asset.get_absolute_href())
def test_asset_absolute_href_no_item_self(self) -> None:
    """An asset owned by an item without a self href has no absolute href."""
    item = Item.from_dict(self.get_example_item_dict())
    assert item.get_self_href() is None
    asset = Asset("./data.geojson")
    asset.set_owner(item)
    self.assertIsNone(asset.get_absolute_href())
def build_items(index_geom):
    """Build the STAC items for every feature in the index.

    Parameters
    ----------
    index_geom : str
        A fiona-readable file (e.g. shapefile) containing the image index.
    """
    with fiona.open(index_geom) as src:
        src_crs = Proj(src.crs)
        dest_crs = Proj("WGS84")
        extent = box(*src.bounds)
        project = Transformer.from_proj(src_crs, dest_crs)
        catalog_bbox = shapely_transform(project.transform, extent)

        # build spatial extent for collection from the reprojected bounds
        ortho_collection = GeobaseSTAC.get_child("canada_spot_orthoimages")
        ortho_collection.extent.spatial = SpatialExtent(
            [list(catalog_bbox.bounds)])

        geobase = GeobaseSpotFTP()

        count = 0
        for feature in src:
            feature_out = feature.copy()
            new_coords = transform_geom(src_crs, dest_crs,
                                        feature["geometry"]["coordinates"])
            feature_out["geometry"]["coordinates"] = new_coords

            name = feature_out["properties"]["NAME"]
            # Lookup also validates the sensor prefix (raises KeyError on
            # unknown prefixes); the value itself is currently unused.
            sensor = SPOT_SENSOR[name[:2]]

            new_item = create_item(name, feature_out, ortho_collection)

            # BUG FIX: the inner loop previously reused ``f``, shadowing the
            # outer feature variable; use a distinct name for the FTP paths.
            for ftp_path in geobase.list_contents(name):
                # Add data to the asset
                spot_file = Asset(href=ftp_path,
                                  title=None,
                                  media_type="application/zip")
                file_key = ftp_path[-13:-4]  # image type
                new_item.add_asset(file_key, spot_file)

            # Add the thumbnail asset
            new_item.add_asset(
                key="thumbnail",
                asset=Asset(
                    href=geobase.get_thumbnail(name),
                    title=None,
                    media_type=MediaType.JPEG,
                ),
            )

            ortho_collection.add_item(new_item)
            count += 1
            print(f"{count}... {new_item.id}")
def test_from_asset(self):
    """EOAsset.from_asset rejects plain assets and preserves EO band info."""
    plain = Asset.from_dict(self.ASSET_DICT)
    with self.assertRaises(STACError):
        EOAsset.from_asset(plain)

    eo_source = Asset.from_dict(self.EO_ASSET_DICT)
    converted = EOAsset.from_asset(eo_source)
    self.assertIsNone(converted.properties)
    self.assertListEqual(converted.bands, [0])
def test_map_items_multiple_2(self):
    # Build a two-level catalog: one item at the root and one inside a
    # child catalog, each carrying an 'ortho' asset.
    catalog = Catalog(id='test-1', description='Test1')
    item1 = Item(id='item1',
                 geometry=RANDOM_GEOM,
                 bbox=RANDOM_BBOX,
                 datetime=datetime.utcnow(),
                 properties={})
    item1.add_asset('ortho', Asset(href='/some/ortho.tif'))
    catalog.add_item(item1)
    kitten = Catalog(id='test-kitten',
                     description='A cuter version of catalog')
    catalog.add_child(kitten)
    item2 = Item(id='item2',
                 geometry=RANDOM_GEOM,
                 bbox=RANDOM_BBOX,
                 datetime=datetime.utcnow(),
                 properties={})
    item2.add_asset('ortho', Asset(href='/some/other/ortho.tif'))
    kitten.add_item(item2)

    def modify_item_title(item):
        item.title = 'Some new title'
        return item

    def create_label_item(item):
        # Assumes the GEOJSON labels are in the
        # same location as the image
        img_href = item.assets['ortho'].href
        label_href = '{}.geojson'.format(os.path.splitext(img_href)[0])
        label_item = Item(id='Labels',
                          geometry=item.geometry,
                          bbox=item.bbox,
                          datetime=datetime.utcnow(),
                          properties={})
        label_item.ext.enable(Extensions.LABEL)
        label_ext = label_item.ext.label
        label_ext.apply(label_description='labels',
                        label_type='vector',
                        label_properties=['label'],
                        label_classes=[
                            LabelClasses.create(classes=['one', 'two'],
                                                name='label')
                        ],
                        label_tasks=['classification'])
        label_ext.add_source(item, assets=['ortho'])
        label_ext.add_geojson_labels(label_href)
        return [item, label_item]

    # First map retitles every item; second map returns [item, label_item]
    # for each, doubling the item count from 2 to 4.
    c = catalog.map_items(modify_item_title)
    c = c.map_items(create_label_item)
    new_catalog = c
    items = new_catalog.get_all_items()
    self.assertTrue(len(list(items)) == 4)
def test_asset_absolute_href(self) -> None:
    """A relative asset href resolves against the item self-href directory."""
    item_path = TestCases.get_path("data-files/item/sample-item.json")
    item = Item.from_dict(self.get_example_item_dict())
    item.set_self_href(item_path)
    asset = Asset("./data.geojson")
    asset.set_owner(item)
    expected = os.path.abspath(
        os.path.join(os.path.dirname(item_path), "./data.geojson"))
    self.assertEqual(expected, asset.get_absolute_href())
def test_asset_absolute_href(self):
    """A relative asset href resolves against the owning item's self href."""
    fixture_path = TestCases.get_path(
        'data-files/itemcollections/sample-item-collection.json')
    with open(fixture_path) as f:
        first_feature = json.load(f)['features'][0]
    item = Item.from_dict(first_feature)
    asset = Asset('./data.geojson')
    asset.set_owner(item)
    self.assertEqual(
        'http://cool-sat.com/catalog/CS3-20160503_132130_04/data.geojson',
        asset.get_absolute_href())
def test_full_copy_2(self):
    # Save a catalog containing an image item and a label item that
    # references it, deep-copy it, save the copy, and compare both trees.
    with TemporaryDirectory() as tmp_dir:
        cat = Catalog(id='test', description='test catalog')
        image_item = Item(id='Imagery',
                          geometry=RANDOM_GEOM,
                          bbox=RANDOM_BBOX,
                          datetime=datetime.utcnow(),
                          properties={})
        for key in ['ortho', 'dsm']:
            image_item.add_asset(
                key,
                Asset(href='some/{}.tif'.format(key),
                      media_type=MediaType.GEOTIFF))

        label_item = LabelItem(
            id='Labels',
            geometry=RANDOM_GEOM,
            bbox=RANDOM_BBOX,
            datetime=datetime.utcnow(),
            properties={},
            label_description='labels',
            label_type='vector',
            label_properties='label',
            label_classes=[LabelClasses(classes=['one', 'two'], name='label')],
            label_tasks=['classification'])
        # The label item points at the image item's 'ortho' asset.
        label_item.add_source(image_item, assets=['ortho'])

        cat.add_items([image_item, label_item])

        cat.normalize_hrefs(
            os.path.join(tmp_dir, 'catalog-full-copy-2-source'))
        cat.save(catalog_type=CatalogType.ABSOLUTE_PUBLISHED)
        cat2 = cat.full_copy()
        cat2.normalize_hrefs(
            os.path.join(tmp_dir, 'catalog-full-copy-2-dest'))
        cat2.save(catalog_type=CatalogType.ABSOLUTE_PUBLISHED)

        self.check_catalog(cat, 'source')
        self.check_catalog(cat2, 'dest')
def data_asset(self, base: str = DEFAULT_BASE) -> Asset:
    """Returns the data asset (aka the tiff file)."""
    href = self._asset_href_with_extension(base, "tif")
    return Asset(
        href=href,
        title=self.title,
        description=self.description,
        media_type=MediaType.COG,
        roles=["data"],
    )
def gpkg_asset(self, base: str = DEFAULT_BASE) -> Asset:
    """Returns the geopackage asset."""
    href = self._asset_href_with_extension(base, "gpkg", id_only=True)
    return Asset(
        href=href,
        media_type=MediaType.GEOPACKAGE,
        roles=["metadata"],
    )
def test_case_3():
    # Root catalog with one imagery item and one label item whose source is
    # the imagery item's 'ortho' asset.
    root_cat = Catalog(id='test3',
                       description='test case 3 catalog',
                       title='test case 3 title')

    image_item = Item(id='imagery-item',
                      geometry=RANDOM_GEOM,
                      bbox=RANDOM_BBOX,
                      datetime=datetime.utcnow(),
                      properties={})
    image_item.add_asset('ortho',
                         Asset(href='some/geotiff.tiff',
                               media_type=MediaType.GEOTIFF))

    overviews = [
        LabelOverview('label',
                      counts=[LabelCount('one', 1),
                              LabelCount('two', 2)])
    ]

    label_item = LabelItem(
        id='label-items',
        geometry=RANDOM_GEOM,
        bbox=RANDOM_BBOX,
        datetime=datetime.utcnow(),
        properties={},
        label_description='ML Labels',
        label_type='vector',
        label_properties=['label'],
        label_classes=[LabelClasses(classes=['one', 'two'], name='label')],
        label_tasks=['classification'],
        label_methods=['manual'],
        label_overviews=overviews)
    label_item.add_source(image_item, assets=['ortho'])

    root_cat.add_item(image_item)
    root_cat.add_item(label_item)

    return root_cat
def item(href: str, read_href_modifier: Optional[ReadHrefModifier] = None) -> Item:
    """Creates a STAC Item from the asset at the provided href.

    The `read_href_modifer` argument can be used to modify the href for the
    rasterio read, e.g. if you need to sign a url.

    This function is intentionally minimal in its signature and capabilities.
    If you need to customize your Item, do so after creation.

    This function sets:
    - id
    - geometry
    - bbox
    - datetime (to the time of item creation): you'll probably want to change this
    - the proj extension
        - either the EPSG code or, if not available, the WKT2
        - transform
        - shape
    - a single asset with key 'data'
        - asset href
        - asset roles to ['data']

    In particular, the datetime and asset media type fields most likely need
    to be updated.
    """
    # Renamed from ``id`` to avoid shadowing the builtin.
    item_id = os.path.splitext(os.path.basename(href))[0]
    if read_href_modifier:
        modified_href = read_href_modifier(href)
    else:
        modified_href = href
    with rasterio.open(modified_href) as dataset:
        crs = dataset.crs
        proj_bbox = dataset.bounds
        proj_transform = list(dataset.transform)[0:6]
        proj_shape = dataset.shape
    # Reproject the dataset footprint to WGS84 for the item geometry/bbox.
    proj_geometry = shapely.geometry.mapping(shapely.geometry.box(*proj_bbox))
    geometry = stactools.core.projection.reproject_geom(crs,
                                                        'EPSG:4326',
                                                        proj_geometry,
                                                        precision=6)
    bbox = list(shapely.geometry.shape(geometry).bounds)
    item = Item(id=item_id,
                geometry=geometry,
                bbox=bbox,
                datetime=datetime.datetime.now(),
                properties={})

    projection = ProjectionExtension.ext(item, add_if_missing=True)
    epsg = crs.to_epsg()
    if epsg:
        projection.epsg = epsg
    else:
        # Fall back to WKT2 when no EPSG code can be derived.
        projection.wkt2 = crs.to_wkt('WKT2')
    projection.transform = proj_transform
    projection.shape = proj_shape

    item.add_asset('data', Asset(href=href, roles=['data']))

    return item
def process_item(self, item, source):
    """
    Converts an input STAC Item's data into Zarr, returning an output STAC item

    Parameters
    ----------
    item : pystac.Item
        the item that should be converted
    source : harmony.message.Source
        the input source defining the variables, if any, to subset from the item

    Returns
    -------
    pystac.Item
        a STAC item containing the Zarr output
    """
    result = item.clone()
    result.assets = {}

    # Create a temporary dir for processing we may do
    workdir = mkdtemp()
    try:
        # Get the data file: the first asset carrying the 'data' role.
        asset = next(v for k, v in item.assets.items()
                     if 'data' in (v.roles or []))
        input_filename = download(asset.href,
                                  workdir,
                                  logger=self.logger,
                                  access_token=self.message.accessToken,
                                  cfg=self.config)

        name = generate_output_filename(asset.href, ext='.zarr')
        root = self.message.stagingLocation + name

        try:
            store = self.s3.get_mapper(root=root, check=False, create=True)
            netcdf_to_zarr(input_filename, store)
        except Exception as e:
            # Print the real error and convert to user-facing error that's more digestible
            self.logger.error(e, exc_info=1)
            # Strip query string and trailing slash to get a clean file name.
            filename = asset.href.split('?')[0].rstrip('/').split('/')[-1]
            raise ZarrException('Could not convert file to Zarr: %s' %
                                (filename))

        # Update the STAC record
        result.assets['data'] = Asset(root,
                                      title=name,
                                      media_type='application/x-zarr',
                                      roles=['data'])

        # Return the STAC record
        return result
    finally:
        # Clean up any intermediate resources
        shutil.rmtree(workdir)
def test_case_3() -> Catalog:
    # Root catalog with an imagery item plus a label item (via the Label
    # extension) whose source is the imagery item's 'ortho' asset.
    root_cat = Catalog(id="test3",
                       description="test case 3 catalog",
                       title="test case 3 title")
    image_item = Item(
        id="imagery-item",
        geometry=ARBITRARY_GEOM,
        bbox=ARBITRARY_BBOX,
        datetime=datetime.utcnow(),
        properties={},
    )
    image_item.add_asset(
        "ortho", Asset(href="some/geotiff.tiff",
                       media_type=MediaType.GEOTIFF))
    overviews = [
        LabelOverview.create(
            "label",
            counts=[LabelCount.create("one", 1),
                    LabelCount.create("two", 2)],
        )
    ]
    label_item = Item(
        id="label-items",
        geometry=ARBITRARY_GEOM,
        bbox=ARBITRARY_BBOX,
        datetime=datetime.utcnow(),
        properties={},
    )
    # Enable the label extension on the item before applying its fields.
    LabelExtension.add_to(label_item)
    label_ext = LabelExtension.ext(label_item)
    label_ext.apply(
        label_description="ML Labels",
        label_type=LabelType.VECTOR,
        label_properties=["label"],
        label_classes=[
            LabelClasses.create(classes=["one", "two"], name="label")
        ],
        label_tasks=["classification"],
        label_methods=["manual"],
        label_overviews=overviews,
    )
    label_ext.add_source(image_item, assets=["ortho"])
    root_cat.add_item(image_item)
    root_cat.add_item(label_item)

    return root_cat
def create_asset(self, asset):
    """Build a pystac Asset from a raw asset mapping.

    The media type and role are looked up from the asset's description via
    the mapping tables; descriptions missing from those tables yield None
    entries.
    """
    description = asset.get(self.mappings.ASSET_DESCRIPTION)
    return Asset(
        asset.get(self.mappings.ASSET_URL),
        title=asset.get(self.mappings.ASSET_TITLE),
        description=description,
        media_type=self.mappings.TYPE_DICT.get(description),
        roles=[self.mappings.ROLE_DICT.get(description)],
    )
def test_add_asset(self):
    """Adding plain and EO assets sets ownership and stores them by key."""
    item_copy = deepcopy(self.eoi)
    plain = Asset('/asset_dir/asset.json')
    eo = EOAsset('/asset_dir/eo_asset.json', bands=[0, 1])

    for asset in (plain, eo):
        self.assertIsNone(asset.owner)

    item_copy.add_asset('new_asset', plain)
    item_copy.add_asset('new_eo_asset', eo)

    self.assertEqual(len(item_copy.assets.items()),
                     len(self.eoi.assets.items()) + 2)
    self.assertEqual(plain, item_copy.assets['new_asset'])
    self.assertEqual(eo, item_copy.assets['new_eo_asset'])
    for asset in (plain, eo):
        self.assertEqual(asset.owner, item_copy)
def generate_cog(stac_item: Item, destination_uri: str, target_assets: List):
    """Convert each target asset of ``stac_item`` to a COG and register it.

    For every key in ``target_assets``, the referenced asset is converted
    with ``main`` and the result is attached back to the item under the key
    ``"<key>_cog"``, copying title/description/roles/properties from the
    source asset.

    Returns the updated item serialized as a dict.
    """
    for input_layer in target_assets:
        asset = stac_item.assets.get(input_layer)
        result = main(asset.href, join(destination_uri, basename(asset.href)))
        # BUG FIX: ``add_asset`` is an Item method, not an Asset method; the
        # original called asset.add_asset(...), which would raise
        # AttributeError at runtime.
        stac_item.add_asset(
            "{}_cog".format(input_layer),
            Asset(href=result['destination_layer'],
                  title=asset.title,
                  description=asset.description,
                  media_type=MediaType.COG,
                  roles=asset.roles,
                  properties=asset.properties))
    return stac_item.to_dict()
def to_stac(item, in_tiff):
    """Wrap a sigma0-dB GeoTIFF in a new STAC item derived from ``item``.

    Copies the source item's id/geometry/bbox/datetime/properties, sets
    Sentinel-1 common metadata, attaches ``in_tiff`` as a GeoTIFF asset and
    records the band via the EO extension.
    """
    item_out = Item(id=item.id,
                    geometry=item.geometry,
                    bbox=item.bbox,
                    datetime=item.datetime,
                    properties=item.properties)

    item_out.common_metadata.set_gsd(20)
    item_out.common_metadata.set_constellation('sentinel-1')
    item_out.common_metadata.set_mission('sentinel-1')
    # Platform letter (a/b) is taken from the 3rd character of the item id
    # — assumes ids follow the Sentinel-1 naming scheme (e.g. "S1A...");
    # TODO confirm upstream id format.
    item_out.common_metadata.set_platform(
        'sentinel-1{}'.format(item.id[2:3].lower()))

    eo_item = extensions.eo.EOItemExt(item_out)

    band = 'sigma_db_vv'

    # 'sar:polarizations' holds the middle token of the band name, uppercased
    # (for 'sigma_db_vv' this yields 'DB').
    item_out.add_asset(
        key=band.lower(),
        asset=Asset(href=in_tiff,
                    media_type=MediaType.GEOTIFF,
                    properties={
                        'sar:polarizations':
                            band.lower().split('_')[1].upper()
                    }))

    asset = eo_item.item.get_assets()[band.lower()]

    # Human-readable description assembled from the band-name tokens, e.g.
    # 'Sigma for polarization channel DB in vv' for 'sigma_db_vv'.
    description = '{} for polarization channel {}{}'.format(
        band.lower().split('_')[0].title(),
        band.lower().split('_')[1].upper(),
        ' in {}'.format(band.lower().split('_')[2])
        if len(band.lower().split('_')) == 3 else '')

    stac_band = extensions.eo.Band.create(name=band.lower(),
                                          common_name=band.lower(),
                                          description=description)
    eo_item.set_bands([stac_band], asset=asset)

    return item_out
def collection_add_sentinel_chips(collection, uri_list, sentinel_version, debug=False):
    """ Add sentinel images to a collection """
    # In debug mode only the first ten URIs are processed.
    if debug:
        uri_list = list(uri_list)[:10]

    for uri in uri_list:
        # Only GeoTIFF chips are indexed.
        if not uri.endswith(".tif"):
            continue

        # Item id is the file stem; country and event id are the first two
        # underscore-separated tokens of the id.
        item_id = os.path.basename(uri).split(".")[0]
        country, event_id, *_ = item_id.split("_")
        params = {}
        params["id"] = item_id
        params["collection"] = collection
        params["properties"] = {
            "country": country,
            "event_id": event_id,
        }
        params["bbox"] = get_chip_bbox(uri, country, event_id)
        params["geometry"] = box(*params["bbox"]).__geo_interface__
        params["datetime"] = image_date_for_country(sentinel_version, country)

        # Create Tiff Item
        item = Item(**params)
        asset = Asset(href=uri,
                      title="GeoTiff",
                      media_type="image/tiff; application=geotiff")
        item.add_asset(key="image", asset=asset)
        # Cache the item keyed by sentinel version + country/event for later
        # lookup.
        SENTINEL_CHIP_ITEM_CACHE[sentinel_version.upper()][chip_cache_id(
            country, event_id)] = item
        collection.add_item(item)
        print("Collection {}: Added STAC Item {}".format(
            collection.id, item.id))
def label_collection_add_items(
    collection,
    root_catalog,
    uri_list,
    links_func,
    label_description,
    label_type,
    label_classes=None,
    label_tasks=None,
    debug=False,
):
    """Add uri_list tif uris to collection as LabelItems

    root_catalog is the top level node in the STAC Catalog where the chips
    labeled by these tifs can be found. Required to correctly setup Links to
    the source chips.

    links_func is a method with the following signature:

        def links_func(root_catalog: Catalog,
                       label_item: LabelItem,
                       country: str,
                       event_id: int): [Link]

    This method should construct a list of links that map the label_item to
    the STAC objects in the root_catalog that label_item is derived from.
    Assumes for now that the asset referenced uses the key "labels"

    The label_ arguments will be passed down to each LabelItem in the
    collection
    """
    # In debug mode only the first ten URIs are processed.
    if debug:
        uri_list = list(uri_list)[:10]
    for uri in uri_list:
        # Only GeoTIFF labels are indexed.
        if not uri.endswith(".tif"):
            continue

        # Item id is the file stem; country and event id are the first two
        # underscore-separated tokens of the id.
        item_id = os.path.basename(uri).split(".")[0]
        country, event_id, *_ = item_id.split("_")
        params = {}
        params["id"] = item_id
        params["collection"] = collection
        params["datetime"] = image_date_for_country("s1", country)
        params["stac_extensions"] = [Extensions.LABEL]
        params["properties"] = {
            "country": country,
            "event_id": event_id,
        }
        params["bbox"] = get_chip_bbox(uri, country, event_id)
        params["geometry"] = box(*params["bbox"]).__geo_interface__

        # Assemble the label-extension arguments from the function inputs;
        # label_classes defaults to an empty list when not given as a list.
        label_ext_params = {}
        if isinstance(label_classes, list):
            label_ext_params["label_classes"] = label_classes
        else:
            label_ext_params["label_classes"] = []
        label_ext_params["label_description"] = label_description
        if label_tasks is not None:
            label_ext_params["label_tasks"] = label_tasks
        label_ext_params["label_type"] = label_type

        item = Item(**params)
        item.ext.label.apply(**label_ext_params)

        # Add Asset
        asset = Asset(href=uri,
                      title="GeoTiff",
                      media_type="image/tiff; application=geotiff")
        item.add_asset(key="labels", asset=asset)
        # links_func replaces the item's links wholesale.
        item.links = links_func(root_catalog, item, country, event_id)

        collection.add_item(item)

        print("Collection {}: Added STAC Item {}".format(
            collection.id, item.id))
def main(ctx, input_reference, s_expression, cbn):
    """Apply an s-expression band computation to a STAC item and write a
    self-contained result catalog to the current directory.

    ctx : CLI context object (presumably a click context — TODO confirm)
    input_reference : directory containing the input catalog.json
    s_expression : the band-math expression to evaluate
    cbn : common band name used for the output band/asset key and file name
    """
    dump(ctx)

    item = get_item(os.path.join(input_reference, "catalog.json"))

    logging.info(f"Processing {item.id}")

    try:
        os.mkdir(item.id)
    except FileExistsError:
        pass

    # Spaces in the common band name become hyphens in the output file name.
    cbn = cbn.replace(' ', '-')

    result = os.path.join(item.id, f"{cbn}.tif")

    logging.info(f"Apply {s_expression} to {item.id}")

    apply_s_expression(item=item, s_expression=s_expression, out_tif=result)

    logging.info("STAC")

    # Output item mirrors the input item's spatial/temporal metadata.
    item_out = Item(
        id=item.id,
        geometry=item.geometry,
        bbox=item.bbox,
        datetime=item.datetime,
        properties=item.properties,
        stac_extensions=item.stac_extensions,
    )

    eo_item = extensions.eo.EOItemExt(item_out)

    # Record the applied expression on the asset itself.
    asset_properties = dict()
    asset_properties["s-expression"] = s_expression

    asset = Asset(
        href=os.path.basename(result),
        media_type=MediaType.COG,
        roles=["data"],
        properties=asset_properties,
    )

    eo_bands = [
        extensions.eo.Band.create(
            name=cbn.lower(),
            common_name=cbn.lower(),
            description=f"{cbn.lower()} ({s_expression})",
        )
    ]

    eo_item.set_bands(eo_bands, asset=asset)

    item_out.add_asset(key=cbn.lower(), asset=asset)

    logging.info("STAC")

    cat = Catalog(id="catalog", description="s-expression")
    cat.add_items([item_out])

    cat.normalize_and_save(root_href="./",
                           catalog_type=CatalogType.SELF_CONTAINED)

    logging.info("Done!")
def burned(pre_item, post_item, ndvi_threshold, ndwi_threshold):
    """Derive a burned-area mask from pre/post-event Sentinel-2 items.

    Stacks B04/B08/B11/SCL for both items, computes NDVI/NDWI differences
    and classifies each pixel: 0 = not burned, 1 = burned, 2 = invalid
    (no-data / saturated / cloud / shadow / water / snow SCL codes).

    Returns a STAC Item whose 'data' asset references the written GeoTIFF.
    """
    items = {"pre-event": pre_item, "post-event": post_item}

    veg_indices = {}
    scl = {}

    for key, s2_item in items.items():
        stack(
            item=s2_item,
            bands=["B04", "B08", "B11", "SCL"],
            target_res=10,
            output_name=f"{key}.tif",
        )
        ndvi, ndwi = indices(f"{key}.tif")
        veg_indices[key] = {"ndvi": ndvi, "ndwi": ndwi}
        scl[key] = get_scl(f"{key}.tif")
        if key in ["pre-event"]:
            # Keep the georeferencing of the pre-event stack for the output.
            geotransform, georef = get_geo(f"{key}.tif")
        os.remove(f"{key}.tif")

    # NOTE(review): '&' binds tighter than '|', so the original evaluated as
    # ((dNDWI > t) & (dNDVI > t) & (pre SCL == 4)) | (post SCL == 4); the
    # parentheses below make that grouping explicit without changing it.
    # If the intent was ... & ((pre == 4) | (post == 4)), regroup accordingly.
    conditions = (
        ((veg_indices["post-event"]["ndwi"] - veg_indices["pre-event"]["ndwi"])
         > float(ndwi_threshold))
        & ((veg_indices["post-event"]["ndvi"] - veg_indices["pre-event"]["ndvi"])
           > float(ndvi_threshold))
        & (scl["pre-event"] == 4)
    ) | (scl["post-event"] == 4)

    # NOTE(review): shape[1]/shape[0] assigned to height/width looks swapped
    # relative to the usual (rows, cols) convention, but both are used
    # consistently below, so behavior is preserved — confirm against write_tif.
    height = scl["pre-event"].shape[1]
    width = scl["pre-event"].shape[0]

    burned_mask = np.zeros((height, width), dtype=np.uint8)
    burned_mask[conditions] = 1

    # Free memory held by the index arrays.
    # BUG FIX: the original inner loop iterated veg_indices.items(), which
    # yields "pre-event"/"post-event" as index names and raised KeyError on
    # veg_indices[key]["pre-event"]; iterate the per-item index dict
    # ("ndvi"/"ndwi") instead.
    for key in items:
        for index_name in veg_indices[key]:
            veg_indices[key][index_name] = None

    # Mark pixels invalid in either scene as 2 (SCL codes: 0 no-data,
    # 1 saturated, 5-9 cloud/shadow/water-related classes).
    invalid = np.zeros((height, width), dtype=bool)
    for code in (0, 1, 5, 6, 7, 8, 9):
        invalid |= (scl["pre-event"] == code) | (scl["post-event"] == code)
    burned_mask[invalid] = 2

    output_name = "BURNED_AREA_{}".format(
        "_".join(s2_item.datetime.strftime("%Y%m%d")
                 for s2_item in items.values()))

    item_id = "burned-area"

    write_tif(
        burned_mask,
        os.path.join(item_id, f"{output_name}.tif"),
        width,
        height,
        geotransform,
        georef,
    )

    result_item = Item(
        id=item_id,
        geometry=items["pre-event"].geometry,
        bbox=items["pre-event"].bbox,
        datetime=items["pre-event"].datetime,
        properties={},
    )

    result_item.add_asset(
        key="data",
        asset=Asset(
            href=os.path.join(".", f"{output_name}.tif"),
            media_type=MediaType.COG,
            title="Burned area analysis from Sentinel-2",
        ),
    )

    return result_item
if running_start_dt > start_dt: running_start_dt = start_dt else: running_start_dt = start_dt if running_end_dt: if running_end_dt < end_dt: running_end_dt = end_dt else: running_end_dt = end_dt fid = feature["id"] binary_asset = Asset( href="{}-usfimr.wkb".format(fid), description="well known binary representation", media_type="application/wkb", ) text_asset = Asset( href="{}-usfimr.wkt".format(fid), description="well known text representation", media_type="application/wkt", ) json_asset = Asset( href="{}-usfimr.geojson".format(fid), description="geojson representation", media_type="application/geo+json", ) serializable_convex_hull = mapping(shapely_geom.convex_hull) item = Item(fid, serializable_convex_hull, bbox_list, start_dt, deepcopy(props))
start_time = datetime.fromisoformat(time_range["from"][:-1]) end_time = datetime.fromisoformat(time_range["to"][:-1]) temporal_extent = TemporalExtent(intervals=[[start_time, end_time]]) stac_items = [] for group_id, image_group in imagery_grouped: # assemble assets so that they might be grouped (without duplication) in items assets = [] for image in image_group: s3_path = "s3://" + image.bucket_name + "/" + image.key # The extents should be the same, so whichever one is checked last should be fine with rio.open(s3_path) as img: bounds = img.bounds assets.append(Asset(s3_path)) if aggregate_bounds is None: aggregate_bounds = bounds else: aggregate_bounds = rio.coords.BoundingBox( min(bounds.left, aggregate_bounds.left), min(bounds.bottom, aggregate_bounds.bottom), max(bounds.right, aggregate_bounds.right), max(bounds.top, aggregate_bounds.top), ) item_spatial_extent = SpatialExtent( [[bounds.bottom, bounds.left, bounds.top, bounds.right]]) item_extent = Extent(item_spatial_extent, temporal_extent) image_item = Item(
def process_item(self, item, source):
    """
    Processes a single input item. Services that are not aggregating
    multiple input files should prefer to implement this method rather
    than #invoke

    This example copies its input to the output, marking "dpi" and
    "variables" message attributes as having been processed

    Parameters
    ----------
    item : pystac.Item
        the item that should be processed
    source : harmony.message.Source
        the input source defining the variables, if any, to subset from
        the item

    Returns
    -------
    pystac.Item
        a STAC catalog whose metadata and assets describe the service output
    """
    result = item.clone()
    result.assets = {}

    # Create a temporary dir for processing we may do
    workdir = mkdtemp()
    try:
        # Get the data file: the first asset carrying the 'data' role.
        asset = next(v for k, v in item.assets.items()
                     if 'data' in (v.roles or []))
        input_filename = download(asset.href,
                                  workdir,
                                  logger=self.logger,
                                  access_token=self.message.accessToken)

        # Mark any fields the service processes so later services do not repeat work
        dpi = self.message.format.process('dpi')
        # Variable subsetting
        variables = source.process('variables')

        # Do the work here!
        var_names = [v.name for v in variables]
        print('Processing item %s, DPI=%d, vars=[%s]' %
              (item.id, dpi, ', '.join(var_names)))
        working_filename = os.path.join(workdir, 'tmp.txt')
        shutil.copyfile(input_filename, working_filename)

        # Stage the output file with a conventional filename
        output_filename = generate_output_filename(asset.href,
                                                   ext=None,
                                                   variable_subset=None,
                                                   is_regridded=False,
                                                   is_subsetted=False)
        url = stage(working_filename,
                    output_filename,
                    'text/plain',
                    location=self.message.stagingLocation,
                    logger=self.logger)

        # Update the STAC record
        result.assets['data'] = Asset(url,
                                      title=output_filename,
                                      media_type='text/plain',
                                      roles=['data'])
        # Other metadata updates may be appropriate, such as result.bbox and
        # result.geometry if a spatial subset was performed

        # Return the STAC record
        return result
    finally:
        # Clean up any intermediate resources
        shutil.rmtree(workdir)
def metadata_asset(self, base: str = DEFAULT_BASE) -> Asset:
    """Returns the metadata asset (aka the XML file)."""
    return Asset(href=self._asset_href_with_extension(base, "xml"),
                 media_type=MediaType.XML,
                 roles=["metadata"])
def thumbnail_asset(self, base: str = DEFAULT_BASE) -> Asset:
    """Returns the thumbnail asset."""
    href = self._asset_href_with_extension(base, "jpg")
    return Asset(
        href=href,
        media_type=MediaType.JPEG,
        roles=["thumbnail"],
    )
def example_asset(self) -> Asset:
    """Construct an Asset from the stored example dictionary."""
    asset_data = self.asset_dict
    return Asset.from_dict(asset_data)