def test_get_json_object_json_does_not_exist():
    """Fetching a key that was never uploaded should yield a falsy result, not raise."""
    client = S3(key=None, secret=None, s3_endpoint=None, region_name='us-east-1')
    initialise_bucket(s3_resource=client.s3_resource, bucket_name=BUCKET)
    # '.jpg' key is deliberately absent from the initialised bucket fixture.
    missing = client.get_json_object(bucket_name=BUCKET, object_name='/path2/catalog.jpg')
    assert not missing
def test_repository_can_get_catalogs_from_s3_path():
    """Catalogs discovered under a bucket path should match the local fixture catalog."""
    client = S3(key=None, secret=None, s3_endpoint=None, region_name='us-east-1')
    initialise_stac_bucket(s3_resource=client.s3_resource, bucket_name=BUCKET)
    repo = repository.S3Repository(client)
    expected_catalog = load_json('tests/data/catalog.json')
    found = repo.get_catalogs_from_path(bucket=BUCKET, path='stac_catalogs/cs_stac')
    assert found == [expected_catalog]
def test_list_objects_with_suffix_and_prefix():
    """Only keys matching both the prefix and the suffix filters should be listed."""
    client = S3(key=None, secret=None, s3_endpoint=None, region_name='us-east-1')
    initialise_bucket(s3_resource=client.s3_resource, bucket_name=BUCKET)
    listing = client.list_objects(BUCKET, prefix='/path2', suffix='catalog.json')
    keys = [entry.key for entry in listing]
    assert keys == ['/path2/catalog.json']
def test_repository_can_get_collections_from_catalog():
    """Collections resolved from a catalog should match the local fixture collection."""
    client = S3(key=None, secret=None, s3_endpoint=None, region_name='us-east-1')
    initialise_stac_bucket(s3_resource=client.s3_resource, bucket_name=BUCKET)
    source_catalog = load_json('tests/data/catalog.json')
    expected_collection = load_json('tests/data/sentinel-2/collection.json')
    repo = repository.S3Repository(client)
    resolved = repo.get_collections_from_catalog(source_catalog)
    assert resolved == [expected_collection]
def test_repository_get_items_from_collection():
    """Items resolved from a collection should match the fixture items on disk.

    Fix: the previous assertion was
    ``any(x != y for x, y in zip(items, items_test))`` — it *passed* when the
    repository returned wrong items and failed when every item matched, and
    ``zip`` silently ignored any length mismatch. Since ``get_files_from_dir``
    order is filesystem-dependent, we now compare order-insensitively and
    also require the same number of items.
    """
    client = S3(key=None, secret=None, s3_endpoint=None, region_name='us-east-1')
    initialise_stac_bucket(s3_resource=client.s3_resource, bucket_name=BUCKET)
    collection_test = load_json('tests/data/sentinel-2/collection.json')
    items_test = [
        load_json(item)
        for item in get_files_from_dir('tests/data/sentinel-2', 'json')
        if not item.endswith('collection.json')
    ]
    repo = repository.S3Repository(client)
    items = repo.get_items_from_collection(collection_test)
    # STAC items are required to carry a unique 'id' — sort on it so the
    # comparison is independent of listing order. TODO confirm ids are unique
    # within the fixture set.
    by_id = lambda item: item.get('id', '')
    assert sorted(items, key=by_id) == sorted(items_test, key=by_id)
def test_get_json_object():
    """A fetched JSON object should deserialize into the expected catalog shape."""
    client = S3(key=None, secret=None, s3_endpoint=None, region_name='us-east-1')
    initialise_bucket(s3_resource=client.s3_resource, bucket_name=BUCKET)
    fetched = client.get_json_object(bucket_name=BUCKET, object_name='/path2/catalog.json')
    # Validate only the top-level types of the catalog document.
    expected_shape = Schema({
        'id': str,
        'stac_version': str,
        'description': str,
        'links': list,
        'stac_extensions': list,
        'title': str,
    })
    assert expected_shape.validate(fetched)
async def message_handler(msg):
    """Dispatch an incoming message to the matching ODC indexer.

    The second dot-separated token of the subject selects the handler
    ('collection' -> index_product_definition, 'item' -> index_dataset).
    After a successful dispatch, the cubedash explorer tables are
    regenerated.

    Fix: the original gated on the parsed ``message_type`` but then
    re-dispatched with a substring scan (``if k in subject``) over every
    handler — inconsistent with the parsed type, and able to fire more than
    one handler if the subject happened to contain several keys. Dispatch
    now happens exactly once, keyed on the parsed type.
    """
    subject = msg.subject
    data = msg.data.decode()
    logger.info(f"Received a message on '{subject}': {data}")
    handlers = {
        'collection': index_product_definition,
        'item': index_dataset,
    }
    # Subject is assumed to be shaped '<prefix>.<type>[. ...]' — TODO confirm
    # against the publisher; an unexpected subject without a '.' would raise
    # IndexError here, as it did before.
    message_type = subject.split('.')[1]
    handler = handlers.get(message_type)
    if handler is None:
        return
    s3 = S3(key=S3_ACCESS_KEY_ID,
            secret=S3_SECRET_ACCESS_KEY,
            s3_endpoint=S3_ENDPOINT,
            region_name=S3_REGION)
    repo = repository.S3Repository(s3)
    dc = Datacube()
    handler(dc.index, repo, data)
    # NOTE(review): fixed command strings, so no injection risk today; prefer
    # subprocess.run([...], shell=False) if these ever take dynamic arguments.
    os.system('cubedash-gen --init --all')
    os.system('cubedash-gen --force-refresh')
def test_check_bucket_does_not_exist():
    """A bucket that was never created must not be reported as existing."""
    client = S3(key=None, secret=None, s3_endpoint=None, region_name='us-east-1')
    # No initialise_bucket() here — the bucket is intentionally absent.
    exists = client.check_bucket_exist(BUCKET)
    assert not exists
def test_check_bucket_exists():
    """A bucket created through initialise_bucket must be reported as existing."""
    client = S3(key=None, secret=None, s3_endpoint=None, region_name='us-east-1')
    initialise_bucket(s3_resource=client.s3_resource, bucket_name=BUCKET)
    exists = client.check_bucket_exist(BUCKET)
    assert exists