def create_collections(collections):
    """Open or create STAC collections under the DGOpenData root catalog.

    For each collection dict in *collections*: if a child catalog with that
    id already exists under the root, it is opened; otherwise a new
    Collection is created, attached to the root, and the root is saved.

    Returns a dict mapping collection id -> Collection object.
    """
    dg_collection = Collection.open(
        os.path.join(root_url, 'DGOpenData', 'catalog.json'))
    # Ids of child catalogs that already exist (second-to-last path segment).
    existing_ids = [link.split('/')[-2]
                    for link in dg_collection.links(rel='child')]
    opened = {}
    for coll in collections:
        if coll['id'] in existing_ids:
            print("Opening existing collection: {}".format(coll['id']))
            opened[coll['id']] = Collection.open(
                os.path.join(root_url, 'DGOpenData', coll['id'],
                             'catalog.json'))
        else:
            print("Creating new collection: {}".format(coll['id']))
            new_coll = Collection(coll)
            dg_collection.add_catalog(new_coll)
            opened[coll['id']] = new_coll
            # Persist the root so the new child link is recorded.
            dg_collection.save()
    return opened
def populateDatabase():
    """Fetch each item's STAC JSON, reshape it for MongoDB, and insert it.

    Side effects: writes one ``mycat/catalog<i>.json`` file per item,
    inserts the reshaped document into the module-level
    ``collection_currency`` Mongo collection, increments the module-level
    counter ``i``, and finally closes the module-level ``client``.
    """
    # Bug fix: the original assigned to `i` without declaring it global, so
    # `i = i + 1` raised UnboundLocalError on the first iteration.
    global i
    for item in col.items():
        print(item)
        row = item.properties['eo:row']
        column = item.properties['eo:column']
        # Keep only the YYYY-MM-DD portion of the datetime string.
        date = item.properties['datetime'][0:10]
        item_id = str(item)  # renamed: `id` shadowed the builtin
        url = (f'https://landsat-stac.s3.amazonaws.com/landsat-8-l1/'
               f'{column}/{row}/{date}/{item_id}.json')
        path = 'mycat/catalog' + str(i) + '.json'
        Collection.open(url).save(path)
        # Reshape the saved document for MongoDB: promote the STAC id to
        # Mongo's `_id` and drop the bulky assets block. (The original
        # opened/closed the same file three times; one read/write suffices.)
        with open(path) as json_file:
            doc = json.load(json_file)
        doc['_id'] = doc.pop('id')
        del doc['assets']
        with open(path, 'w') as outfile:
            json.dump(doc, outfile)
        collection_currency.insert(doc)
        print(url)
        i = i + 1
    client.close()
def test_add_item(self):
    """Adding an item to a collection links the item to that collection."""
    catalog = Catalog.create(root='http://my.cat').save(
        os.path.join(self.path, 'catalog.json'))
    collection = Collection.open(
        os.path.join(testpath, 'catalog/eo/landsat-8-l1/catalog.json'))
    catalog.add_catalog(collection)
    item = Item.open(
        os.path.join(testpath, 'catalog/eo/landsat-8-l1/item.json'))
    collection.add_item(item)
    # The item's parent must now be the landsat collection.
    assert item.parent().id == 'landsat-8-l1'
def find_items(collection_name, sensor_name=None):
    """Yield items from the named collection, optionally filtered by sensor.

    When *sensor_name* is truthy, only items whose 'eo:platform' property
    equals it are yielded; otherwise every item is yielded.
    """
    collection = Collection.open(
        os.path.join(root_url, 'DGOpenData', collection_name, 'catalog.json'))
    for item in collection.items():
        if not sensor_name:
            yield item
        elif item.properties.get('eo:platform') == sensor_name:
            # .get() returns None for missing keys, which never equals a
            # truthy sensor_name — same filtering as the explicit `in` check.
            yield item
def test_add_item_with_subcatalogs(self):
    """Adding an item with a sub-catalog path keeps root/collection links."""
    catalog = Catalog.create(root='http://my.cat').save(
        os.path.join(self.path, 'test_subcatalogs.json'))
    collection = Collection.open(
        os.path.join(testpath, 'catalog/eo/landsat-8-l1/catalog.json'))
    catalog.add_catalog(collection)
    item = Item.open(
        os.path.join(testpath, 'catalog/eo/landsat-8-l1/item.json'))
    collection.add_item(item, path='${landsat:path}/${landsat:row}/${date}')
    assert item.root().id == catalog.id
    assert item.collection().id == collection.id
    # Re-adding along the same path must reuse the existing sub-catalogs.
    collection.add_item(item, '${landsat:path}/${landsat:row}/${date}')
    assert item.root().id == catalog.id
def lambda_handler(event, context):
    """SNS-triggered handler: turn Sentinel-2 tile metadata into STAC items.

    For each tile in the SNS message, fetches its tileInfo.json, transforms
    it to a STAC item, and republishes the item data to the SNS topic.
    """
    logger.info('Event: %s' % json.dumps(event))
    collection = Collection.open(
        'https://sentinel-stac.s3.amazonaws.com/sentinel-2-l1c/catalog.json')
    message = json.loads(event['Records'][0]['Sns']['Message'])
    logger.debug('Message: %s' % json.dumps(message))
    for tile in message['tiles']:
        tile_url = op.join(SETTINGS['roda_url'], tile['path'], 'tileInfo.json')
        metadata = read_remote(tile_url)
        logger.debug('Metadata: %s' % json.dumps(metadata))
        # Transform the raw tileInfo metadata into a STAC item.
        item = transform(metadata)
        logger.info('Item: %s' % json.dumps(item.data))
        client.publish(TopicArn=sns_arn, Message=json.dumps(item.data))
        logger.info('Published to %s' % sns_arn)
def update_collection(event, context):
    """SQS-triggered handler: add incoming STAC items to a collection.

    Reads collection root and path/filename templates from environment
    variables, adds each queued item to the collection, and (optionally)
    publishes each added item to an SNS notification topic.
    """
    collection_root = os.getenv('COLLECTION_ROOT')
    # NOTE(review): 'PATH' is a standard OS environment variable that is
    # always set in a Lambda runtime, so this almost certainly returns the
    # system executable search path, not the intended sub-catalog template.
    # Confirm and likely rename the env var (e.g. STAC_PATH).
    path = os.getenv('PATH')
    filename = os.getenv('FILENAME')
    item_count = len(event['Records'])
    stac_links = []
    for record in event['Records']:
        stac_item = json.loads(record['body'])
        print(stac_item)
        col = Collection.open(collection_root)
        collection_name = col.id
        kwargs = {'item': Item(stac_item)}
        if path:
            # Turn 'a/b' into '$a/$b' — the template form add_item expects.
            kwargs.update({'path': '$' + '/$'.join(path.split('/'))})
        if filename:
            kwargs.update({'filename': '$' + '/$'.join(filename.split('/'))})
        print(kwargs)
        col.add_item(**kwargs)
        col.save()
        # Record the self link of the item just added.
        stac_links.append(kwargs['item'].links('self')[0])
        # Send message to SNS Topic if enabled
        if NOTIFICATION_TOPIC:
            kwargs = utils.stac_to_sns(kwargs['item'].data)
            kwargs.update({
                'TopicArn':
                f"arn:aws:sns:{REGION}:{ACCOUNT_ID}:{NOTIFICATION_TOPIC}"
            })
            sns_client.publish(**kwargs)
    # Summary log line for the whole batch.
    print(
        f"LOGS CollectionName: {collection_name}\tItemCount: {item_count}\tItemLinks: {stac_links}"
    )
def update_collection(root, long_poll, concurrency, path, filename):
    """Wire AWS resources for a collection into the serverless config.

    Creates an SQS queue for the collection, subscribes it to the SNS topic
    with a filter policy on the collection name, and attaches a Lambda
    function to the queue (state is passed via ENV variables), then writes
    the updated serverless config back to disk.
    """
    name = Collection.open(root).id
    filter_rule = {'collection': [name]}
    # Strip non-alphanumeric characters (and underscores) so the name is a
    # valid AWS resource identifier. Raw string fixes the invalid escape
    # sequence '\W' that the original non-raw literal produced.
    pattern = re.compile(r'[\W_]+')
    name = pattern.sub('', name)
    with open(sls_config_path, 'r') as f:
        # Using unsafe load to preserve type.
        # NOTE(review): yaml.unsafe_load can instantiate arbitrary Python
        # objects; the config file is local here, but prefer full_load if
        # the preserved types allow it.
        sls_config = yaml.unsafe_load(f)
    aws_resources = resources.update_collection(name, root, filter_rule,
                                                long_poll, concurrency, path,
                                                filename)
    sls_config['resources']['Resources'].update(aws_resources['resources'])
    sls_config['functions'].update(aws_resources['functions'])
    with open(sls_config_path, 'w') as outf:
        yaml.dump(sls_config, outf, indent=1)
def open_collection(self):
    """Open and return the landsat-8-l1 collection test fixture."""
    fixture_path = os.path.join(testpath,
                                'catalog/eo/landsat-8-l1/catalog.json')
    return Collection.open(fixture_path)
from pymongo import MongoClient

# SECURITY(review): hard-coded credentials in the connection string — move
# them to an environment variable or secrets store and rotate this password.
client = MongoClient(
    'mongodb+srv://piAdmin:[email protected]/test?retryWrites=true&w=majority',
    27017)
db = client['metadata']
# Target Mongo collection for the item documents.
collection_currency = db['landsat']

# Root Landsat-8 STAC catalog hosted on S3.
cat = Catalog.open(
    'https://landsat-stac.s3.amazonaws.com/landsat-8-l1/catalog.json')
print(cat)
#print(data)
#test
# create a Catalog object with JSON
#mycat = Catalog(data)

#https://landsat-stac.s3.amazonaws.com/landsat-8-l1/ LC80101172015002LGN00
col = Collection.open(
    'https://landsat-stac.s3.amazonaws.com/landsat-8-l1/catalog.json')
print(col, col.extent)
#print(col.items())

# Global counter used to number the files written by populateDatabase.
i = 1


def populateDatabase():
    # NOTE(review): this definition appears truncated in this view — the
    # body continues beyond the visible source; do not treat it as complete.
    for item in col.items():
        print(item)
        #item2 = Item.open(item)
        #print(item2)
        row = item.properties['eo:row']
        column = item.properties['eo:column']
        data = item.properties['datetime']
        data = data[0:10]
        id = str(item)
import os.path as op
from shapely.geometry import MultiPoint, Point
from shapely import geometry
from datetime import datetime, timedelta
from dateutil.parser import parse
from pyproj import Proj, transform as reproj
from satstac import Collection, Item, utils

from .utils import get_matching_s3_keys, read_from_s3
from .version import __version__

# NOTE(review): `logging` is used below but no `import logging` is visible
# in this view — presumably imported elsewhere; confirm.
logger = logging.getLogger(__name__)

# Local Sentinel-2 L1C collection definition shipped with the package.
_collection = Collection.open(
    op.join(op.dirname(__file__), 'sentinel-2-l1c.json'))

# Endpoints and templates for locating Sentinel-2 data and naming items.
SETTINGS = {
    'roda_url': 'https://roda.sentinel-hub.com/sentinel-s2-l1c',
    's3_url': 'https://sentinel-s2-l1c.s3.amazonaws.com',
    'inv_bucket': 'sentinel-inventory',
    'inv_key': 'sentinel-s2-l1c/sentinel-s2-l1c-inventory',
    'path_pattern': '${sentinel:utm_zone}/${sentinel:latitude_band}/${sentinel:grid_square}',
    'fname_pattern': '${date}/${id}'
}


# NOTE(review): this definition is cut off mid-signature in this view; the
# remainder of the parameter list and the body are not visible.
def add_items(catalog, records, start_date=None,
def ingest_items(self, url):
    """Ingest every item found in the collection at *url*."""
    collection = Collection.open(url)
    for stac_item in collection.items():
        AssetLoader(stac_item, self.config.API_ENDPOINT).ingest()
def ingest_collections(self, url):
    """Ingest every child collection found under the catalog at *url*."""
    parent = Collection.open(url)
    for child in parent.collections():
        AssetLoader(child, self.config.API_ENDPOINT).ingest()
def ingest_collection(self, url):
    """Ingest the single collection located at *url*."""
    collection = Collection.open(url)
    loader = AssetLoader(collection, self.config.API_ENDPOINT)
    loader.ingest()