def main(ctx, input_path):
    # dump the CWL and params (if requested)
    dump(ctx)

    if 'TMPDIR' in os.environ:
        os.chdir(os.environ['TMPDIR'])

    logging.info(os.path.join(input_path, 'catalog.json'))

    item = get_item(os.path.join(input_path, 'catalog.json'))

    output_dir = f'{item.id}'

    calibrator = Calibrator()

    item_out = calibrator.calibrate(item)

    logging.info('STAC')

    cat = Catalog(id='catalog', description='Calibrated SAR product')
    cat.add_items([item_out])
    cat.normalize_and_save(root_href='./', catalog_type=CatalogType.SELF_CONTAINED)

    logging.info('Done!')

    # os.mkdir(output_dir)
    sys.exit(0)
def main(ctx, ndvi_threshold, ndwi_threshold, pre_event, post_event):
    dump(ctx)

    os.environ["PREFIX"] = "/opt/anaconda/envs/env_burned_area"
    os.environ["PROJ_LIB"] = os.path.join(os.environ["PREFIX"], "share/proj")
    os.environ["GDAL_DATA"] = os.path.join(os.environ["PREFIX"], "share/gdal")

    burned_area_item = burned(
        pre_item=get_item(os.path.join(pre_event, "catalog.json")),
        post_item=get_item(os.path.join(post_event, "catalog.json")),
        ndvi_threshold=ndvi_threshold,
        ndwi_threshold=ndwi_threshold,
    )

    logging.info("Output catalog")

    catalog = Catalog(id="catalog", description="Results")
    catalog.clear_items()
    catalog.clear_children()
    catalog.add_items([burned_area_item])
    catalog.describe()
    catalog.normalize_and_save(root_href="./", catalog_type=CatalogType.SELF_CONTAINED)
def main(data_dir, input_references, store_username, store_apikey):
    if store_username is not None:
        os.environ['STAGEIN_USERNAME'] = store_username
        os.environ['STAGEIN_PASSWORD'] = store_apikey

    STAC_IO.read_text_method = my_read_method

    items = []

    for input_reference in input_references:
        thing = pystac.read_file(input_reference)

        if isinstance(thing, pystac.item.Item):
            items.append(thing)
        elif isinstance(thing, pystac.catalog.Catalog):
            for item in thing.get_items():
                items.append(item)

    # create catalog
    catalog = Catalog(id='catalog', description='staged STAC catalog')
    catalog.add_items(items)
    catalog.normalize_and_save(root_href=data_dir, catalog_type=CatalogType.RELATIVE_PUBLISHED)
    catalog.describe()
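# `my_read_method` is assigned to STAC_IO.read_text_method above but is not defined in this
# snippet. A minimal sketch of what such a reader could look like, assuming the legacy
# pystac STAC_IO hook (pystac < 1.0) and HTTP basic auth credentials taken from the
# STAGEIN_USERNAME / STAGEIN_PASSWORD environment variables set above; the function body
# is an assumption, not the original implementation.
import os
from urllib.parse import urlparse

import requests


def my_read_method(uri):
    parsed = urlparse(uri)
    if parsed.scheme in ('http', 'https'):
        auth = None
        if 'STAGEIN_USERNAME' in os.environ:
            auth = (os.environ['STAGEIN_USERNAME'], os.environ['STAGEIN_PASSWORD'])
        response = requests.get(uri, auth=auth)
        response.raise_for_status()
        return response.text
    # fall back to the local filesystem for non-HTTP references
    with open(uri) as f:
        return f.read()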
def test_full_copy_2(self):
    with TemporaryDirectory() as tmp_dir:
        cat = Catalog(id='test', description='test catalog')

        image_item = Item(id='Imagery',
                          geometry=RANDOM_GEOM,
                          bbox=RANDOM_BBOX,
                          datetime=datetime.utcnow(),
                          properties={})
        for key in ['ortho', 'dsm']:
            image_item.add_asset(
                key, Asset(href='some/{}.tif'.format(key), media_type=MediaType.GEOTIFF))

        label_item = LabelItem(
            id='Labels',
            geometry=RANDOM_GEOM,
            bbox=RANDOM_BBOX,
            datetime=datetime.utcnow(),
            properties={},
            label_description='labels',
            label_type='vector',
            label_properties='label',
            label_classes=[LabelClasses(classes=['one', 'two'], name='label')],
            label_tasks=['classification'])
        label_item.add_source(image_item, assets=['ortho'])

        cat.add_items([image_item, label_item])

        cat.normalize_hrefs(os.path.join(tmp_dir, 'catalog-full-copy-2-source'))
        cat.save(catalog_type=CatalogType.ABSOLUTE_PUBLISHED)

        cat2 = cat.full_copy()
        cat2.normalize_hrefs(os.path.join(tmp_dir, 'catalog-full-copy-2-dest'))
        cat2.save(catalog_type=CatalogType.ABSOLUTE_PUBLISHED)

        self.check_catalog(cat, 'source')
        self.check_catalog(cat2, 'dest')
def test_altered_ids_are_retained(self):
    catalog = Catalog('0', 'Catalog 0')
    catalog.add_link(
        Link('harmony_source', 'http://example.com/C0001-EXAMPLE'))
    message = Message(full_message)
    items = [
        Item('mutate-me', None, [0, 0, 1, 1], '2020-01-01T00:00:00.000Z', {}),
        Item('2', None, [0, 0, 1, 1], '2020-01-01T00:00:00.000Z', {})
    ]
    catalog.add_items(items)
    adapter = AdapterTester(message, catalog, config=self.config)
    (message, out_catalog) = adapter.invoke()
    out_items = [item for item in out_catalog.get_items()]
    self.assertEqual(out_items[0].id, 'i-mutated-you')
def test_invocation_processes_items_with_sources(self):
    catalog = Catalog('0', 'Catalog 0')
    catalog.add_link(
        Link('harmony_source', 'http://example.com/C0001-EXAMPLE'))
    message = Message(full_message)
    items = [
        Item('1', None, [0, 0, 1, 1], '2020-01-01T00:00:00.000Z', {}),
        Item('2', None, [0, 0, 1, 2], '2020-01-01T00:00:00.000Z', {})
    ]
    catalog.add_items(items)
    adapter = AdapterTester(message, catalog, config=self.config)
    adapter.invoke()
    self.assertEqual(AdapterTester.process_args[0][0].bbox, items[0].bbox)
    self.assertEqual(AdapterTester.process_args[1][0].bbox, items[1].bbox)
    self.assertEqual(AdapterTester.process_args[0][1], message.sources[0])
    self.assertEqual(AdapterTester.process_args[1][1], message.sources[0])
def test_unaltered_ids_are_assigned_new_uuids(self):
    catalog = Catalog('0', 'Catalog 0')
    catalog.add_link(
        Link('harmony_source', 'http://example.com/C0001-EXAMPLE'))
    message = Message(full_message)
    items = [
        Item('1', None, [0, 0, 1, 1], '2020-01-01T00:00:00.000Z', {}),
        Item('2', None, [0, 0, 1, 1], '2020-01-01T00:00:00.000Z', {})
    ]
    catalog.add_items(items)
    adapter = AdapterTester(message, catalog, config=self.config)
    (message, out_catalog) = adapter.invoke()
    self.assertNotEqual(out_catalog.id, catalog.id)
    out_items = [item for item in out_catalog.get_items()]
    self.assertNotEqual(out_items[0].id, items[0].id)
    self.assertNotEqual(out_items[1].id, items[1].id)
def test_invocation_recurses_subcatalogs(self):
    catalog = Catalog('0', 'Catalog 0')
    catalog.add_link(
        Link('harmony_source', 'http://example.com/C0001-EXAMPLE'))
    catalog.add_child(Catalog('1a', 'Catalog 1a'))
    subcatalog = Catalog('1b', 'Catalog 1b')
    catalog.add_child(subcatalog)
    subsubcatalog_a = Catalog('2a', 'Catalog 2a')
    subsubcatalog_b = Catalog('2b', 'Catalog 2b')
    subsubcatalog_b.add_link(
        Link('harmony_source', 'http://example.com/C0002-EXAMPLE'))
    subcatalog.add_children([subsubcatalog_a, subsubcatalog_b])
    message = Message(full_message)
    items_a = [
        Item('3', None, [0, 0, 1, 3], '2020-01-01T00:00:00.000Z', {}),
        Item('4', None, [0, 0, 1, 4], '2020-01-01T00:00:00.000Z', {})
    ]
    items_b = [
        Item('5', None, [0, 0, 1, 5], '2020-01-01T00:00:00.000Z', {}),
        Item('6', None, [0, 0, 1, 6], '2020-01-01T00:00:00.000Z', {})
    ]
    subsubcatalog_a.add_items(items_a)
    subsubcatalog_b.add_items(items_b)
    adapter = AdapterTester(message, catalog, config=self.config)
    adapter.invoke()
    self.assertEqual(AdapterTester.process_args[0][0].bbox, items_a[0].bbox)
    self.assertEqual(AdapterTester.process_args[1][0].bbox, items_a[1].bbox)
    self.assertEqual(AdapterTester.process_args[2][0].bbox, items_b[0].bbox)
    self.assertEqual(AdapterTester.process_args[3][0].bbox, items_b[1].bbox)
    self.assertEqual(AdapterTester.process_args[0][1], message.sources[0])
    self.assertEqual(AdapterTester.process_args[1][1], message.sources[0])
    self.assertEqual(AdapterTester.process_args[2][1], message.sources[1])
    self.assertEqual(AdapterTester.process_args[3][1], message.sources[1])
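# The four tests above rely on an `AdapterTester` helper that is not shown here. A minimal
# sketch of what it presumably looks like, assuming harmony-service-lib's BaseHarmonyAdapter:
# it records every (item, source) pair passed to process_item so the tests can inspect
# AdapterTester.process_args, and renames the 'mutate-me' item so that
# test_altered_ids_are_retained can verify that ids altered by the adapter are kept.
# The import path and the renaming behaviour are assumptions.
from harmony.adapter import BaseHarmonyAdapter


class AdapterTester(BaseHarmonyAdapter):
    process_args = []

    def process_item(self, item, source):
        # record the call for later assertions
        AdapterTester.process_args.append((item, source))
        if item.id == 'mutate-me':
            # simulate an adapter that deliberately changes an item id
            item.id = 'i-mutated-you'
        return item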
def stage(input_references):
    STAC_IO.read_text_method = my_read_method

    catalogs = []

    for index, input_reference in enumerate(input_references):
        items = []

        thing = read_file(input_reference)

        if isinstance(thing, Item):
            items.append(thing)
        elif isinstance(thing, Catalog):
            for item in thing.get_items():
                items.append(item)

        # create a catalog per staged input reference
        catalog = Catalog(id=items[0].id,
                          description='staged STAC catalog with {}'.format(items[0].id))
        catalog.add_items(items)
        catalog.normalize_and_save(root_href=items[0].id,
                                   catalog_type=CatalogType.RELATIVE_PUBLISHED)
        catalog.describe()

        catalogs.append(os.path.dirname(catalog.get_self_href()))

    return catalogs
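# Hypothetical usage of stage(): each returned entry is the local directory that now holds
# the staged catalog.json for one input reference (the reference URL below is an example).
#
#   staged_dirs = stage(['https://example.com/collections/S2/items/item-1'])
#   for staged_dir in staged_dirs:
#       print(staged_dir)  # e.g. 'item-1'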
def main(ctx, input_reference, s_expression, cbn):
    dump(ctx)

    item = get_item(os.path.join(input_reference, "catalog.json"))

    logging.info(f"Processing {item.id}")

    try:
        os.mkdir(item.id)
    except FileExistsError:
        pass

    cbn = cbn.replace(' ', '-')

    result = os.path.join(item.id, f"{cbn}.tif")

    logging.info(f"Apply {s_expression} to {item.id}")

    apply_s_expression(item=item, s_expression=s_expression, out_tif=result)

    logging.info("STAC")

    item_out = Item(
        id=item.id,
        geometry=item.geometry,
        bbox=item.bbox,
        datetime=item.datetime,
        properties=item.properties,
        stac_extensions=item.stac_extensions,
    )

    eo_item = extensions.eo.EOItemExt(item_out)

    asset_properties = dict()
    asset_properties["s-expression"] = s_expression

    asset = Asset(
        href=os.path.basename(result),
        media_type=MediaType.COG,
        roles=["data"],
        properties=asset_properties,
    )

    eo_bands = [
        extensions.eo.Band.create(
            name=cbn.lower(),
            common_name=cbn.lower(),
            description=f"{cbn.lower()} ({s_expression})",
        )
    ]

    eo_item.set_bands(eo_bands, asset=asset)

    item_out.add_asset(key=cbn.lower(), asset=asset)

    logging.info("STAC")

    cat = Catalog(id="catalog", description="s-expression")
    cat.add_items([item_out])
    cat.normalize_and_save(root_href="./", catalog_type=CatalogType.SELF_CONTAINED)

    logging.info("Done!")
def scombi(channel_inputs, bands, s_expressions, resolution='highest', aoi=None,
           color=None, profile=None, lut=None, epsg=None):

    target_dir = 'combi'

    if not os.path.exists(target_dir):
        os.mkdir(target_dir)

    items = []
    assets_href = []
    rescaled = []

    for index, input_path in enumerate(channel_inputs):
        if input_path is None:
            items.append(None)
            assets_href.append(None)
            continue

        item = get_item(input_path)

        logging.info(item)

        items.append(item)
        assets_href.append(get_band_asset_href(item, bands[index]))

    # define the AOI; if none is supplied, use the minimum bounding box of the inputs
    if aoi is None:
        aoi = get_mbb([shape(item.geometry) for item in items]).wkt

    min_lon, min_lat, max_lon, max_lat = loads(aoi).bounds

    # get an EPSG code if one hasn't been supplied and check whether a warp is needed
    epsg, epsg_codes = get_epsg(epsg, assets_href)

    # rescale and get the original assets (these are part of the output)
    logging.info('Rescaling and COG for input assets')

    rescaled = []

    # get the data
    for index, asset_href in enumerate(assets_href):
        if asset_href is None:
            rescaled.append(None)
            continue

        logging.info('Getting band {} from {}'.format(bands[index], asset_href))

        output_name = '{}/{}_{}.tif'.format(target_dir, index + 1, bands[index])

        if epsg_codes[index] == epsg:
            ds = gdal.Translate(output_name,
                                asset_href,
                                outputType=gdal.GDT_Float32,
                                projWin=[min_lon, max_lat, max_lon, min_lat],
                                projWinSRS='EPSG:4326')
        else:
            logging.info('Warp')
            ds = gdal.Warp(output_name,
                           asset_href,
                           outputType=gdal.GDT_Float32,
                           outputBounds=[min_lon, min_lat, max_lon, max_lat],
                           outputBoundsSRS='EPSG:4326',
                           dstSRS=epsg)

        ds = None

        rescaled.append(output_name)

    # build a VRT with the rescaled assets using the selected resolution mode
    logging.info('Build VRT')

    vrt = 'temp.vrt'

    ds = gdal.BuildVRT(vrt,
                       [ds for ds in rescaled if ds],
                       resolution=resolution,
                       separate=True)
    ds.FlushCache()

    output_cell_size = ds.GetGeoTransform()[1]

    logging.info(str(output_cell_size))

    logging.info('Pimp me')

    pimp.me(vrt, f'{target_dir}/combi.tif', bands, s_expressions, color, lut)

    ds = None

    # to STAC
    logging.info('STAC')

    cat = Catalog(id='scombidooo', description="Combined RGB composite")

    # TODO fix datetime
    item = Item(id='combi',
                geometry=mapping(loads(aoi)),
                bbox=list(loads(aoi).bounds),
                datetime=items[0].datetime,
                properties={'bands': bands,
                            's_expressions': s_expressions,
                            'input_items': [_item.id for _item in items],
                            'color': 'N/A' if not color else color,
                            'profile': 'N/A' if not profile else profile})

    item.common_metadata.set_gsd(output_cell_size)

    eo_item = extensions.eo.EOItemExt(item)

    for index, asset_href in enumerate(assets_href):
        if asset_href is None:
            continue

        _asset = get_band_asset(items[index], bands[index])
        _asset.href = './{}_{}.tif'.format(index + 1, bands[index])

        item.add_asset('{}_{}'.format(index + 1, bands[index]), _asset)

    # add the combi.tif asset
    item.add_asset(key='rgb',
                   asset=Asset(href='./combi.tif', media_type=MediaType.COG))

    cat.add_items([item])

    cat.normalize_and_save(root_href='./', catalog_type=CatalogType.SELF_CONTAINED)

    logging.info('Done!')

    return cat.get_self_href()
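# `get_mbb` is used above to derive a fallback AOI but is not defined in this snippet.
# A minimal sketch, assuming it returns the minimum bounding box of a list of shapely
# geometries as a shapely polygon (so that `.wkt` can be called on the result):
from shapely.geometry import box
from shapely.ops import unary_union


def get_mbb(geometries):
    # union the footprints and return their envelope as a polygon
    min_lon, min_lat, max_lon, max_lat = unary_union(geometries).bounds
    return box(min_lon, min_lat, max_lon, max_lat)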
def main(ndvi_threshold, ndwi_threshold, pre_event, post_event):
    os.environ['PREFIX'] = '/opt/anaconda/envs/env_burned_area'
    os.environ['PROJ_LIB'] = os.path.join(os.environ['PREFIX'], 'share/proj')
    os.environ['GDAL_DATA'] = os.path.join(os.environ['PREFIX'], 'share/gdal')

    s2_item_pre = S2_stac_item(pre_event['value'])
    s2_item_post = S2_stac_item(post_event['value'])

    s2_items = dict()
    s2_items['pre-event'] = S2_stac_item(pre_event['value'])
    s2_items['post-event'] = S2_stac_item(post_event['value'])

    dates = []
    bboxes = []

    for index, item in enumerate([s2_item_pre.item, s2_item_post.item]):
        dates.append(item.datetime)
        bboxes.append(shape(item.geometry).bounds)

        logging.info('Stacking bands for input {}'.format(item.id))

        vrt_bands = []

        for band in ['B04', 'B08', 'B11', 'SCL']:
            vrt_bands.append('/vsicurl/{}'.format(item.assets[band].get_absolute_href()))

        vrt = '{}.vrt'.format('pre_event' if index == 0 else 'post_event')
        tif = '{}.tif'.format('pre_event' if index == 0 else 'post_event')

        logging.info('Build vrt for {}'.format(item.id))

        ds = gdal.BuildVRT(vrt, vrt_bands, srcNodata=0, xRes=10, yRes=10, separate=True)
        ds.FlushCache()

        logging.info('Translate {}'.format(item.id))

        gdal.Translate(tif, vrt, outputType=gdal.GDT_UInt16)

        os.remove(vrt)

    ds = gdal.Open('pre_event.tif')

    pre_b04 = ds.GetRasterBand(1).ReadAsArray()
    pre_b08 = ds.GetRasterBand(2).ReadAsArray()
    pre_b11 = ds.GetRasterBand(3).ReadAsArray()
    pre_scl = ds.GetRasterBand(4).ReadAsArray()

    ds = None
    os.remove('pre_event.tif')

    ds = gdal.Open('post_event.tif')

    post_b04 = ds.GetRasterBand(1).ReadAsArray()
    post_b08 = ds.GetRasterBand(2).ReadAsArray()
    post_b11 = ds.GetRasterBand(3).ReadAsArray()
    post_scl = ds.GetRasterBand(4).ReadAsArray()

    width = ds.RasterXSize
    height = ds.RasterYSize

    input_geotransform = ds.GetGeoTransform()
    input_georef = ds.GetProjectionRef()

    ds = None
    os.remove('post_event.tif')

    gain = 10000

    # normalised difference indices from the pre- and post-event bands
    pre_ndwi2 = (pre_b08 / gain - pre_b11 / gain) / (pre_b08 / gain + pre_b11 / gain)
    post_ndwi2 = (post_b08 / gain - post_b11 / gain) / (post_b08 / gain + post_b11 / gain)

    pre_b11 = None
    post_b11 = None

    pre_ndvi = (pre_b08 / gain - pre_b04 / gain) / (pre_b08 / gain + pre_b04 / gain)
    post_ndvi = (post_b08 / gain - post_b04 / gain) / (post_b08 / gain + post_b04 / gain)

    pre_b04 = None
    post_b04 = None
    pre_b08 = None
    post_b08 = None

    conditions = (((post_ndwi2 - pre_ndwi2) > float(ndwi_threshold['value'])) &
                  ((post_ndvi - pre_ndvi) > float(ndvi_threshold['value'])) &
                  (pre_scl == 4) | (post_scl == 4))

    burned = np.zeros((height, width), dtype=np.uint8)
    burned[conditions] = 1

    pre_ndwi2 = None
    post_ndwi2 = None
    pre_ndvi = None
    post_ndvi = None

    # mark pixels falling in SCL classes 0, 1 or 5 to 9 in either acquisition as invalid (2)
    burned[np.where((pre_scl == 0) | (post_scl == 0) |
                    (pre_scl == 1) | (post_scl == 1) |
                    (pre_scl == 5) | (post_scl == 5) |
                    (pre_scl == 6) | (post_scl == 6) |
                    (pre_scl == 7) | (post_scl == 7) |
                    (pre_scl == 8) | (post_scl == 8) |
                    (pre_scl == 9) | (post_scl == 9))] = 2

    logging.info('Write output product')

    output_name = 'S2_BURNED_AREA_{}'.format('_'.join([d.strftime("%Y%m%d") for d in dates]))

    write_tif(burned, '{}.tif'.format(output_name), width, height, input_geotransform, input_georef)

    logging.info('Output catalog')

    catalog = Catalog(id='catalog', description='Results')
    catalog.clear_items()
    catalog.clear_children()

    result_titles = dict()
    result_titles[output_name] = {'title': 'Burned area analysis from Sentinel-2',
                                  'media_type': MediaType.COG}

    items = []

    for key, value in result_titles.items():
        result_item = Item(id=key,
                           geometry=s2_items['pre-event'].item.geometry,
                           bbox=s2_items['pre-event'].item.bbox,
                           datetime=s2_items['pre-event'].item.datetime,
                           properties={})

        result_item.add_asset(key='data',
                              asset=Asset(href='./{}.tif'.format(key),
                                          media_type=value['media_type'],
                                          title=value['title']))

        items.append(result_item)

    # collection.add_items(items)
    catalog.add_items(items)
    catalog.describe()
    catalog.normalize_and_save(root_href='./', catalog_type=CatalogType.SELF_CONTAINED)

    shutil.move('{}.tif'.format(output_name),
                os.path.join('./', output_name, '{}.tif'.format(output_name)))
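# `write_tif` is not defined in this snippet. A minimal sketch of a GDAL-based writer
# matching the call above (array, output path, raster size, geotransform, projection),
# assuming a single-band Byte GeoTIFF is the intended output; the details are assumptions.
from osgeo import gdal


def write_tif(data, output_path, width, height, geotransform, projection):
    driver = gdal.GetDriverByName('GTiff')
    ds = driver.Create(output_path, width, height, 1, gdal.GDT_Byte)
    ds.SetGeoTransform(geotransform)
    ds.SetProjection(projection)
    ds.GetRasterBand(1).WriteArray(data)
    ds.FlushCache()
    ds = None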