def test_cli_publish(self):
    """Publish a catalog via the CLI and verify the self link is rewritten."""
    cat = Catalog.create(root='https://my.cat').save_as('catalog.json')
    # fix: the original bound this to `input`, shadowing the builtin
    cmdline = "sat-stac publish catalog.json https://my.kitten"
    sys.argv = split(cmdline)
    cli()
    # re-open from disk to see what the CLI wrote
    cat = Catalog.open('catalog.json')
    assert (cat.links('self')[0] == 'https://my.kitten/catalog.json')
    os.remove('catalog.json')
def test_add_catalog(self):
    """Add a sub-catalog and verify it shows up as a child with the same id."""
    cat = Catalog.create(root='http://my.cat').save_as(
        os.path.join(self.path, 'catalog.json'))
    col = Catalog.open(
        os.path.join(testpath, 'catalog/eo/landsat-8-l1/catalog.json'))
    cat.add_catalog(col)
    # fix: take the first child directly instead of materializing the
    # whole children() generator into a list just to index [0]
    child = next(iter(cat.children()))
    assert (child.id == col.id)
def create_year_catalogs(self, year_list, ds_name):
    """Create one per-year sub-catalog under the named datasource catalog.

    Each year is added and saved individually so the on-disk parent
    catalog accumulates children across iterations.
    """
    parent_path = os.path.join(self.root, ds_name, 'catalog.json')
    for year in year_list:
        year_catalog = Catalog({
            'id': year,
            'stac_version': '0.7.0',
            'description': f'Data acquired during the year {year}',
        })
        parent = Catalog.open(parent_path)
        parent.add_catalog(year_catalog)
        parent.save()
def cli():
    """Command-line entry point: dispatch the 'ingest' and 'inventory' commands.

    Reads arguments from ``sys.argv``; log level is the parsed ``log`` value
    times 10 (maps 1..5 onto the logging module's DEBUG..CRITICAL scale).
    """
    args = parse_args(sys.argv[1:])
    logging.basicConfig(stream=sys.stdout, level=args.pop('log') * 10)
    cmd = args.pop('command')
    if cmd == 'ingest':
        cat = Catalog.open(args['catalog'])
        # inventory file is optional; fall back to the latest remote inventory
        if args['filename'] is not None:
            records = sentinel.read_inventory(args['filename'])
        else:
            records = sentinel.latest_inventory()
        sentinel.add_items(cat, records, start_date=args['start'],
                           end_date=args['end'], prefix=args['prefix'],
                           s3meta=args['s3meta'], publish=args['publish'])
    elif cmd == 'inventory':
        with open(args['filename'], 'w') as f:
            f.write('datetime,path\n')
            # fix: the original used a list comprehension purely for its
            # side effects; a plain loop is the idiomatic form
            for i in sentinel.latest_inventory():
                f.write('%s,%s\n' % (i['datetime'], i['path']))
def test_publish(self):
    """Publishing a catalog rewrites item self links to the new root URL."""
    dest = os.path.join(self.path, 'test_publish')
    shutil.copytree(os.path.join(testpath, 'catalog'), dest)
    cat = Catalog.open(os.path.join(dest, 'catalog.json'))
    cat.publish('https://my.cat')
    item = Item.open(os.path.join(dest, 'eo/landsat-8-l1/item.json'))
    assert (item.links('self')[0] ==
            'https://my.cat/eo/landsat-8-l1/item.json')
def test_add_item(self):
    """An item added to a collection gets that collection as its parent."""
    root_file = os.path.join(self.path, 'catalog.json')
    cat = Catalog.create(root='http://my.cat').save(root_file)
    col = Collection.open(
        os.path.join(testpath, 'catalog/eo/landsat-8-l1/catalog.json'))
    cat.add_catalog(col)
    item = Item.open(
        os.path.join(testpath, 'catalog/eo/landsat-8-l1/item.json'))
    col.add_item(item)
    assert (item.parent().id == 'landsat-8-l1')
def create_project_collections(self, projects, ds_name):
    """Add a Collection per project feature to its year catalog.

    Features without a starting temporal extent are skipped; the year is
    taken from the first temporal bound (YYYY-... prefix).
    """
    with open(projects, 'r') as geoj:
        data = json.load(geoj)
    for feat in data['features']:
        # skip features with no temporal start bound
        if not feat['extent']['temporal'][0]:
            continue
        year = feat['extent']['temporal'][0].split('-')[0]
        year_cat = Catalog.open(
            os.path.join(self.root, ds_name, year, 'catalog.json'))
        year_cat.add_catalog(Collection(feat))
        year_cat.save()
def gen_items(stac_link):
    """ Generate STAC Items from STAC Catalog entrypoint. """
    cat = Catalog.open(stac_link)
    # Check if root.
    # fix: the original compared the string `cat.id` against the Catalog
    # object returned by `cat.root()`, which can never be equal, so the
    # root branch was unreachable; compare ids instead.
    if cat.id == cat.root().id:
        for child in cat.children():
            yield from child.items()
    else:
        yield from cat.items()
def test_add_item_with_subcatalogs(self):
    """Items added with a templated path still resolve root and collection."""
    cat = Catalog.create(root='http://my.cat').save(
        os.path.join(self.path, 'test_subcatalogs.json'))
    col = Collection.open(
        os.path.join(testpath, 'catalog/eo/landsat-8-l1/catalog.json'))
    cat.add_catalog(col)
    item = Item.open(
        os.path.join(testpath, 'catalog/eo/landsat-8-l1/item.json'))
    subcat_path = '${landsat:path}/${landsat:row}/${date}'
    col.add_item(item, path=subcat_path)
    assert (item.root().id == cat.id)
    assert (item.collection().id == col.id)
    # adding again exercises the code path using existing sub-catalogs
    col.add_item(item, subcat_path)
    assert (item.root().id == cat.id)
def check_stac(response):
    """Asserts if the response contains a valid STAC catalog and prints it out.

    More robust assertions could be done here in the future to confirm
    that the STAC metadata is valid per the request parameters

    Arguments:
        response {response.Response} -- the response to display
    """
    # fix: parse the JSON body once instead of calling response.json()
    # three times per loop iteration, and iterate links directly instead
    # of `for i in range(len(...))` (the original also reused `i` for both
    # the link index and the STAC item, shadowing one with the other)
    links = response.json()['links']
    for link in links:
        if link['title'] == 'STAC catalog':
            stac_url = link['href']
            assert (stac_url)
            cat = Catalog.open(stac_url)
            for item in cat.items():
                assert (item.id)
                assert (item.datetime)
                assert (item.bbox)
                assert (item.assets.keys())
                print('STAC Item')
                print('\t', 'ID:', item.id)
                print('\t', 'Date:', item.datetime)
                print('\t', 'Bounding Box:', item.bbox)
                print('\t', 'File:', list(item.assets.keys()))
def build_stac_catalog(id_list=None, verbose=False):
    """Scrape NOAA Storm imagery, build a STAC catalog locally, then sync to S3.

    Parameters
    ----------
    id_list : list, optional
        Event ids to restrict the scraper to (None scrapes everything).
    verbose : bool, optional
        Enable verbose scraper output.
    """
    prefix = '/data/'
    tempdir = tempfile.mkdtemp(prefix=prefix)
    tempthumbs = tempfile.mkdtemp(prefix=prefix)
    print("Catalog tempdir: {}".format(tempdir))
    print("Thumbnails tempdir: {}".format(tempthumbs))

    NoaaStormCatalog.verbose = verbose
    print("Running web scraper.")
    with ScrapyRunner(NoaaStormCatalog) as runner:
        scraped_items = list(runner.execute(ids=id_list))
        # first two entries of the scraper output are collections + count
        collections = scraped_items.pop(0)
        item_count = scraped_items.pop(0)
        collections = create_collections(collections, scraped_items, id_list)

        # Build stac catalog locally
        root_catalog = Catalog.open(os.path.join(ROOT_URL, 'catalog.json'))
        root_catalog.save_as(filename=os.path.join(tempdir, 'catalog.json'))

        # NOAA Storm catalog
        os.mkdir(os.path.join(tempdir, 'NOAAStorm'))
        noaa_storm_cat = Catalog.open(
            os.path.join(ROOT_URL, 'NOAAStorm', 'catalog.json'))
        noaa_storm_cat.save_as(
            filename=os.path.join(tempdir, 'NOAAStorm', 'catalog.json'))

        print("Creating collections.")
        d = {}
        for collection in collections:
            coll = Collection(collection)
            noaa_storm_cat.add_catalog(coll)
            d.update({collection['id']: coll})

        # Setup directories for thumbnails
        thumbdir = os.path.join(tempthumbs, 'thumbnails')
        os.mkdir(thumbdir)
        for coll in d:
            coll_dir = os.path.join(thumbdir, d[coll].id)
            if not os.path.exists(coll_dir):
                os.mkdir(coll_dir)

        # Sort assets by archive naming convention
        archive_assets = []
        for item in scraped_items:
            if 'archive' in item:
                if item['archive'].endswith('_RGB.tar'):
                    archive_assets.append(
                        RGBArchive(
                            item,
                            os.path.join(thumbdir, d[item['event_name']].id)))
                elif item['archive'].endswith(
                        ('GCS_NAD83.tar', 'GCS_NAD83.zip')):
                    archive_assets.append(
                        JpegTilesArchive(
                            item,
                            os.path.join(thumbdir, d[item['event_name']].id)))
                elif item['archive'].endswith(('Oblique.tar', 'Oblique.zip')):
                    archive_assets.append(
                        ObliqueArchive(
                            item,
                            os.path.join(thumbdir, d[item['event_name']].id)))
                else:
                    print("Found a JPG with disconnected world file")

        # Download archives
        download_archives(archive_assets, prefix)

        print("Creating items and thumbnails.")
        # Add items and keep each collection's extent in sync
        for item in build_stac_items(archive_assets):
            coll = d[item['collection']]
            coll.add_item(Item(item), path='${date}', filename='${id}')
            _update_spatial_extent(coll, item['bbox'])
            _update_temporal_extent(coll, item['properties']['datetime'])

    # Upload catalog to S3
    print("Uploading catalog to S3.")
    subprocess.call(f"aws s3 sync {tempdir} s3://cognition-disaster-data/",
                    shell=True)
    print("Uploading thumbnails to S3.")
    # Upload thumbnails to S3
    subprocess.call(
        f"aws s3 sync {thumbdir} s3://cognition-disaster-data/thumbnails/",
        shell=True)
    cleanup(prefix)


def _update_spatial_extent(coll, bbox):
    """Grow coll's spatial extent [W, S, E, N] to include bbox (same order)."""
    try:
        extent = coll.extent['spatial']
        extent[0] = min(extent[0], bbox[0])
        extent[1] = min(extent[1], bbox[1])
        # fix: the original compared the max bounds (indices 2, 3) with '<'
        # as well, which shrank the eastern/northern bounds instead of
        # growing them; the union of extents needs max here.
        extent[2] = max(extent[2], bbox[2])
        extent[3] = max(extent[3], bbox[3])
    except (KeyError, TypeError, IndexError):
        # extent missing or malformed: seed it with this item's bbox
        # (fix: was a bare `except:`)
        coll.extent['spatial'] = bbox


def _update_temporal_extent(coll, dt_str):
    """Widen coll's temporal extent [start, end] to include datetime dt_str."""
    try:
        item_dt = load_datetime(dt_str)
        min_dt = load_datetime(coll.extent['temporal'][0])
        max_dt = load_datetime(coll.extent['temporal'][1])
        if item_dt < min_dt:
            coll.extent['temporal'][0] = dt_str
        if item_dt > max_dt:
            # fix: the original read item['properites'] (typo) here, which
            # raised KeyError and caused the bare except to RESET the whole
            # extent instead of extending the end bound.
            coll.extent['temporal'][1] = dt_str
    except (KeyError, TypeError, IndexError, ValueError):
        # extent missing or unparseable: seed it with this item's datetime
        # (fix: was a bare `except:`)
        coll.extent['temporal'] = [dt_str, dt_str]
def create_datasource_catalog(self, ds_name):
    """Attach the named datasource catalog under the root catalog and save."""
    root_path = os.path.join(self.root, 'catalog.json')
    parent = Catalog.open(root_path)
    parent.add_catalog(Catalog(datasource_catalogs[ds_name]))
    parent.save()
def get_catalog(cls):
    """ Open existing test catalog """
    catalog_file = os.path.join(testpath, 'catalog/catalog.json')
    return Catalog.open(catalog_file)
def create_catalog(cls, name):
    """Create a new catalog named *name* under the class test path."""
    target = os.path.join(cls.path, name)
    return Catalog.create(target)
# Script: mirror Landsat-8 L1 STAC metadata from the public landsat-stac
# bucket into a MongoDB collection.
from satstac import Catalog, Collection, Item
import urllib.request, json
from pymongo import MongoClient

# NOTE(review): credentials are hard-coded in the connection URI — move
# them to environment/config before this is shared or committed.
client = MongoClient(
    'mongodb+srv://piAdmin:[email protected]/test?retryWrites=true&w=majority',
    27017)
db = client['metadata']
# target Mongo collection for landsat item metadata
collection_currency = db['landsat']

# Open the public landsat-8-l1 catalog over HTTP (network side effect at import)
cat = Catalog.open(
    'https://landsat-stac.s3.amazonaws.com/landsat-8-l1/catalog.json')
print(cat)
#print(data)
#test
# create a Catalog object with JSON
#mycat = Catalog(data)
#https://landsat-stac.s3.amazonaws.com/landsat-8-l1/ LC80101172015002LGN00
# Same URL opened as a Collection to get access to extent and items
col = Collection.open(
    'https://landsat-stac.s3.amazonaws.com/landsat-8-l1/catalog.json')
print(col, col.extent)
#print(col.items())
i = 1


def populateDatabase():
    """Iterate the collection's items and read their row/column properties.

    NOTE(review): the visible body ends after reading row/column without
    writing anything to MongoDB — this function appears truncated in this
    chunk; confirm the remainder before relying on it.
    """
    for item in col.items():
        print(item)
        #item2 = Item.open(item)
        #print(item2)
        row = item.properties['eo:row']
        column = item.properties['eo:column']
def test_main(self):
    """ Run main function """
    # create test catalog and confirm it lands on disk
    fname = op.join(testpath, 'test_main', 'catalog.json')
    Catalog.create(id='test').save_as(fname)
    assert (op.exists(fname))
def test_init(self):
    """Construct a Catalog directly from a parsed JSON dict."""
    catalog_file = os.path.join(testpath, 'catalog/catalog.json')
    with open(catalog_file) as f:
        data = json.load(f)
    cat = Catalog(data)
    assert (cat.id == 'stac')
def test_add_catalog_without_saving(self):
    """Adding a child to a catalog that was never saved raises STACError."""
    unsaved = Catalog.create()
    with self.assertRaises(STACError):
        unsaved.add_catalog({})
def create_root_catalog(self):
    """Write the root catalog.json under self.root from the template JSON."""
    root_cat = Catalog(cat_json, root=self.root)
    root_cat.save_as(os.path.join(self.root, 'catalog.json'))
def test_create_with_keywords(self):
    """Keyword arguments passed to create() end up on the catalog."""
    target = os.path.join(testpath, 'test-catalog', 'create_with_keywords')
    description = 'this is a catalog'
    cat = Catalog.create(target, description=description)
    assert (cat.description == description)
def test_create(self):
    """ Create new catalog file """
    # a fresh catalog gets the library's default id
    new_cat = Catalog.create()
    assert (new_cat.id == 'stac-catalog')