def stac_search_by_pagination(cls, result, method, provider_json, collection_name, bbox, datetime, query, limit, limit_to_search):
    """Fetch the remaining result pages and merge them into `result` in place."""
    # pages 2..last_page-1 hold the features the first request could not return
    last_page = int(limit / MAX_LIMIT) + 1
    for page in range(2, last_page):
        logging.info('StacBusiness.stac_search_by_pagination - page: %s', page)
        page_result = cls.stac_search(method, provider_json, collection_name, bbox, datetime, query, page, limit_to_search)
        # accumulate this page on top of the first page's result
        result['features'] += page_result['features']
        result['context']['returned'] += page_result['context']['returned']
    context = result['context']
    matched = int(context['matched'])
    logging.info('StacBusiness.stac_search_by_pagination - context: %s', context)
    # when something was matched, expose the caller's requested limit
    if matched:
        context['limit'] = limit
    return result
def post(self):
    """Validate a JSON search request body and return the matching features."""
    logging.info('StacBusiness.post()')
    # guard clause: only JSON bodies are accepted
    if not request.is_json:
        raise BadRequest(
            "mimetype must indicate JSON data, in other words, mimetype must be equals to `application/json`"
        )
    body = request.get_json()
    logging.info('StacBusiness.post() - body: %s', body)
    data, status = validate(body, 'search_post')
    logging.info('StacBusiness.post() - data: %s', data)
    logging.info('StacBusiness.post() - status: %s', status)
    # 400 - Bad Request when the body does not match the `search_post` schema
    if status is False:
        raise BadRequest(dumps(data))
    return StacBusiness.post_search(**data)
def search_collections(cls, url):
    """GET a provider's /collections listing and return the parsed JSON."""
    logging.info('StacComposeServices.search_collections()')
    base_url = '{}/collections?limit=1000'.format(url)
    logging.info('StacComposeServices.search_collections() - base_url: \'GET %s\'', base_url)
    response = get(base_url, headers={})
    # anything other than a 200/201 success is reported as "not found"
    if not response or response.status_code not in (200, 201):
        raise NotFound("URL was not found. [ url: {0}, status_code: {1} ]".format(base_url, response.status_code))
    return loads(response.text)
def __init__(self):
    """Load providers.json and index each reachable provider's settings."""
    with open('{}/providers/static/providers.json'.format(BASE_DIR)) as p:
        data = load(p)
    self.__providers__ = {}
    self.__versions__ = {}
    self.__method__ = {}
    self.__require_credentials__ = {}
    self.__downloadable__ = {}
    self.__filter_mult_collection__ = {}
    self.__objs__ = {}
    for key, provider in data.items():
        version = provider['version']
        url = provider['url']
        # 'ignore_provider_validation' flag indicates if this provider can be validated or not,
        # because 'inpe-stac' is slow to run, then it cannot be validated when docker compose is executed
        if provider['ignore_provider_validation']:
            logging.info('Ignored validation of provider: \'{}\''.format(url))
        elif not is_url_valid(version, url):
            # if STAC URL is not valid, then ignore this provider, it cannot be returned
            logging.warning('Invalid provider: \'{}\''.format(url))
            continue
        self.__providers__[key] = url
        self.__versions__[key] = provider['version']
        self.__method__[key] = provider['method']
        self.__filter_mult_collection__[key] = provider['filter_mult_collection']
        self.__downloadable__[key] = provider['downloadable']
        self.__require_credentials__[key] = provider['require_credentials']
        self.__objs__[key] = {
            "url": url,
            "downloadable": provider['downloadable'],
            "require_credentials": provider['require_credentials']
        }
    self.__providers_json__ = data
def get_stac_search(cls, provider_json, query):
    """GET /search (or /stac/search for STAC <= 0.7.0) with `query` appended.

    Returns the parsed JSON body on 200/201; otherwise a dict with
    'status_code' and 'description' describing the HTTP failure.
    """
    # default: STAC >= 0.9.0
    base_url = '{}/search?{}'.format(provider_json['url'], query)
    # legacy providers (STAC <= 0.7.0) expose the endpoint under /stac
    if VersionInfo.parse(provider_json['version']) <= VersionInfo.parse('0.7.0'):
        base_url = '{}/stac/search?{}'.format(provider_json['url'], query)
    logging.info('StacComposeServices.get_stac_search - base_url: \'GET {}\''.format(base_url))
    response = get(base_url, headers={})
    # fix: the log label wrongly said 'post_stac_search' in this GET helper
    logging.info(f"StacComposeServices.get_stac_search - response: {response}")
    if response and response.status_code in (200, 201):
        return loads(response.text)
    return {'status_code': response.status_code, 'description': response.text}
def get_collections_by_providers(cls, providers):
    """Collect the STAC collections of every requested provider."""
    logging.info('CollectionsBusiness.get_collections_by_providers()')
    logging.info(
        'CollectionsBusiness.get_collections_by_providers() - providers: %s',
        providers)
    result = {"providers": []}
    available_providers = cls.providers_business.get_providers()
    logging.info(
        'CollectionsBusiness.get_collections_by_providers() - available_providers: %s',
        available_providers)
    for provider in providers:
        # unknown providers are rejected before any request is made
        if provider not in available_providers:
            raise BadRequest('Provider `{}` is not available.'.format(provider))
        response = StacComposeServices.search_collections(available_providers[provider]['url'])
        # `meta` and `links` keys are not needed by the caller, drop them
        response.pop('meta', None)
        response.pop('links', None)
        if 'collections' not in response:
            logging.info(
                'CollectionsBusiness.get_collections_by_providers() - provider: %s',
                provider)
            raise BadRequest('Invalid provider: `{}`'.format(provider))
        # tag the payload with the provider name and keep it
        response['id'] = provider
        result["providers"].append(response)
    return result
def post_collections_collection_id_items(cls, url, collection_id, data):
    """POST /collections/{collection_id}/items"""
    logging.info('StacComposeServices.post_collections_collection_id_items()\n')
    base_url = '{}/collections/{}/items'.format(url, collection_id)
    logging.info('StacComposeServices.post_collections_collection_id_items() - base_url: \'POST {}\''.format(base_url))
    response = post(base_url, headers={'Content-Type': 'application/json'}, data=dumps(data))
    # only a 200/201 response carries a payload; anything else yields None
    if not response or response.status_code not in (200, 201):
        return None
    return loads(response.text)
def post_stac_search(cls, provider_json, data):
    """POST /stac/search"""
    logging.info(f'StacComposeServices.post_stac_search - provider_json: {provider_json}')
    # legacy providers (STAC <= 0.7.0) expose the endpoint under /stac,
    # newer ones (default: STAC >= 0.9.0) directly under /search
    if VersionInfo.parse(provider_json['version']) <= VersionInfo.parse('0.7.0'):
        base_url = '{}/stac/search'.format(provider_json['url'])
    else:
        base_url = '{}/search'.format(provider_json['url'])
    logging.info(f"StacComposeServices.post_stac_search - base_url: 'POST {base_url}\'")
    response = post(base_url, headers={'Content-Type': 'application/json'}, data=dumps(data))
    logging.info(f"StacComposeServices.post_stac_search - response: {response}")
    if response and response.status_code in (200, 201):
        return loads(response.text)
    # propagate the HTTP failure as a plain dict the caller can inspect
    return {'status_code': response.status_code, 'description': response.text}
def get(self):
    """Validate query-string args and list STAC collections per provider."""
    logging.info('CollectionsController.get()')
    args = request.args.to_dict(flat=True)
    logging.info('CollectionsController.get() - args: %s', args)
    data, status = validate(args, 'controller_validation')
    logging.info('CollectionsController.get() - data: %s', data)
    logging.info('CollectionsController.get() - status: %s', status)
    # 400 - Bad Request when validation fails
    if status is False:
        raise BadRequest(dumps(data))
    # List of STAC collections by providers
    providers = args['providers'].split(',')
    return CollectionsBusiness.get_collections_by_providers(providers)
def stac_search(cls, method, provider_json, collections, bbox, datetime=False, query=None, page=1, limit=MAX_LIMIT):
    """Dispatch a search to the GET or POST implementation based on `method`."""
    logging.info('StacBusiness.stac_search\n')
    logging.info('StacBusiness.stac_search - method: %s', method)
    logging.info('StacBusiness.stac_search - collections: %s\n', collections)
    if method == "POST":
        # NOTE(review): page is hard-coded to 1 here, the `page` argument is
        # not forwarded — confirm this is intentional
        return cls.post_stac_search(provider_json, collections, bbox, datetime, query=query, page=1, limit=limit)
    if method == "GET":
        # GET searches go through the items endpoint; `query` and `page` are not forwarded
        return CollectionsBusiness.stac_get_items(provider_json, collections, bbox, datetime=datetime, limit=limit)
    raise BadRequest('Invalid method: {}'.format(method))
def get(self):
    """Validate query-string args and return the searched features."""
    args = request.args.to_dict(flat=True)
    logging.info('CollectionsItemsController.get() - args: %s', args)
    data, status = validate(args, 'search_get')
    logging.info('CollectionsItemsController.get() - data: %s', data)
    logging.info('CollectionsItemsController.get() - status: %s', status)
    # 400 - Bad Request when validation fails
    if status is False:
        raise BadRequest(dumps(data))
    # NOTE(review): the raw `request.args` are forwarded, not the validated `data`
    return CollectionsBusiness.search_get(**request.args)
def is_url_valid(version, url):
    """Return True when the provider's root STAC endpoint answers 200/201."""
    # default: STAC >= 0.9.0
    url = url + '/'
    # if STAC <= '0.7.0', the API lives under the /stac prefix
    if VersionInfo.parse(version) <= VersionInfo.parse('0.7.0'):
        url = url + 'stac'
    logging.info(f'is_url_valid - version: {version}')
    logging.info(f'is_url_valid - url: {url}')
    try:
        response = get(url, headers={})
    except ConnectionError:
        logging.warning(
            f"Failed to establish a new connection with 'GET {url}'")
        return False
    logging.info(f'is_url_valid - response: {response}')
    return bool(response and response.status_code in (200, 201))
def get_collections_collection_id_items(cls, url, collection_id, query):
    """GET /collections/{collection_id}/items"""
    logging.info('StacComposeServices.get_collections_collection_id_items()\n')
    base_url = '{}/collections/{}/items?{}'.format(url, collection_id, query)
    logging.info('StacComposeServices.get_collections_collection_id_items() - base_url: \'GET {}\'\n'.format(base_url))
    resp = get(base_url, headers={})
    code = resp.status_code
    logging.info('StacComposeServices.get_collections_collection_id_items() - response.status_code: {}'.format(code))
    # success: hand the parsed body back to the caller
    if resp and code in (200, 201):
        return loads(resp.text)
    if code == 504:
        # the upstream provider timed out; forward its JSON message
        raise GatewayTimeout(loads(resp.text))
    # any other failure is surfaced as a 500 with the raw body
    raise InternalServerError(resp.text)
def stac_post(cls, provider_json, url, collection, bbox, datetime=False, cloud_cover=None, page=1, limit=100):
    """Search one collection via POST /stac/search on the given provider.

    Builds the request body (bbox, collection query, pagination and optional
    cloud-cover / temporal filters) and delegates to
    StacComposeServices.post_stac_search.

    NOTE(review): `url` is unused here (the URL comes from `provider_json`);
    kept for interface compatibility with existing callers.

    Returns the provider's response dict, or None when the request fails.
    """
    logging.info('CollectionsBusiness.stac_post')
    logging.info('CollectionsBusiness.stac_post - provider_json: %s', provider_json)
    logging.info('CollectionsBusiness.stac_post - url: %s', url)
    logging.info('CollectionsBusiness.stac_post - collection: %s', collection)
    logging.info('CollectionsBusiness.stac_post - bbox: %s', bbox)
    logging.info('CollectionsBusiness.stac_post - datetime: %s', datetime)
    logging.info('CollectionsBusiness.stac_post - cloud_cover: %s', cloud_cover)
    logging.info('CollectionsBusiness.stac_post - page: %s', page)
    logging.info('CollectionsBusiness.stac_post - limit: %s', limit)
    data = {
        'bbox': bbox.split(','),
        'query': {
            'collection': {
                'eq': collection
            }
        },
        'page': page,
        'limit': limit
    }
    # if cloud_cover is a number and not a boolean
    if isinstance(cloud_cover, (int, float)) and not isinstance(cloud_cover, bool):
        data['query']['eo:cloud_cover'] = {'lte': cloud_cover}
    # if STAC <= '0.7.0', the temporal filter key is `time`
    if datetime and (VersionInfo.parse(provider_json['version']) <= VersionInfo.parse('0.7.0')):
        data['time'] = datetime
    elif datetime:
        # default: STAC >= 0.9.0
        data['datetime'] = datetime
    logging.info('CollectionsBusiness.stac_post - data: %s', data)
    try:
        return StacComposeServices.post_stac_search(provider_json, data)
    except Exception:
        # fix: the failure used to be swallowed silently; log it so broken
        # providers are visible, but keep the best-effort `None` result
        logging.exception('CollectionsBusiness.stac_post - request failed')
        return None
def search_get(cls, collections, bbox, cloud_cover=False, datetime=False, limit=300, query=None):
    """Search features for `provider:collection` pairs listed in `collections`.

    `collections` is a comma-separated string of `provider:collection` pairs.
    Returns a dict of {provider: {collection: feature_collection}}.
    POST providers are paginated in MAX_LIMIT-sized pages; GET providers are
    queried once through the items endpoint.
    """
    logging.info('CollectionsBusiness.search')
    # limit is a string, then I need to convert it
    limit = int(limit)
    # if cloud_cover is not False, in other words, it is a string, then I need to convert it
    if cloud_cover:
        cloud_cover = float(cloud_cover)
    logging.info('CollectionsBusiness.search - collections: %s', collections)
    logging.info('CollectionsBusiness.search - bbox: %s', bbox)
    logging.info('CollectionsBusiness.search - cloud_cover: %s', cloud_cover)
    logging.info('CollectionsBusiness.search - datetime: %s', datetime)
    logging.info('CollectionsBusiness.search - limit: %s', limit)
    logging.info('CollectionsBusiness.search - query: %s', query)
    result_dict = {}
    # the provider is the part before ':' in each `provider:collection` pair
    providers = list(set([p.split(':')[0] for p in collections.split(',')]))
    logging.info('CollectionsBusiness.search - providers: %s', providers)
    for provider in providers:
        logging.info('CollectionsBusiness.search - provider: %s', provider)
        url = cls.providers_business.get_providers()[provider]['url']
        # collections belonging to this provider (part after ':')
        cs = [
            c.split(':')[1] for c in collections.split(',')
            if c.split(':')[0] == provider
        ]
        method = cls.providers_business.get_providers_methods()[provider]
        providers_json = cls.providers_business.get_providers_json()[provider]
        logging.info('CollectionsBusiness.search - url: %s', url)
        logging.info('CollectionsBusiness.search - cs: %s', cs)
        logging.info('CollectionsBusiness.search - method: %s', method)
        # if there is not a provider inside the dict, then initialize it
        if provider not in result_dict:
            result_dict[provider] = {}
        if method == 'POST':
            for collection in cs:
                logging.info('CollectionsBusiness.search - collection: %s', collection)
                logging.info('CollectionsBusiness.search - MAX_LIMIT: %s', MAX_LIMIT)
                # initialize collection
                result_dict[provider][collection] = None
                # if 'limit' is less than the maximum I can search, then I can use 'limit' to
                # search my features just one time
                if limit <= MAX_LIMIT:
                    limit_to_search = limit
                # if 'limit' is greater than the maximum I can search, then I use the maximum number and I search by pages
                else:
                    limit_to_search = MAX_LIMIT
                # first page is always requested and stored directly
                result = cls.stac_post(providers_json, url, collection, bbox, datetime, cloud_cover, 1, limit_to_search)
                result_dict[provider][collection] = result
                result = add_context_field_in_the_feature_collection_if_it_does_not_exist(result, page=1, limit=limit_to_search)
                result = rename_fields_from_feature_collection(result)
                found = int(result['context']['matched'])
                logging.debug('CollectionsBusiness.search - context: %s', result['context'])
                # if I've already got all features, then I go out of the loop
                if limit <= MAX_LIMIT or found <= MAX_LIMIT:
                    logging.debug('CollectionsBusiness.search - just one request was did')
                    continue
                else:
                    logging.debug('CollectionsBusiness.search - more requests are required')
                    # if there is more results to get, I'm going to search them by pagination
                    # NOTE(review): range stops at int(limit/MAX_LIMIT), so a limit that is not a
                    # multiple of MAX_LIMIT leaves a partial last page unfetched — confirm intended
                    for page in range(2, int(limit / MAX_LIMIT) + 1):
                        logging.info('CollectionsBusiness.search - page: %s', page)
                        result = cls.stac_post(providers_json, url, collection, bbox, datetime, cloud_cover, page, limit_to_search)
                        result = add_context_field_in_the_feature_collection_if_it_does_not_exist(result, page=page, limit=limit_to_search)
                        result = rename_fields_from_feature_collection(result)
                        # if I'm on other page, then I increase the old result
                        result_dict[provider][collection]['features'] += result['features']
                        result_dict[provider][collection]['context']['returned'] += result['context']['returned']
                    # get matched variable based on the accumulated context
                    context = result_dict[provider][collection]['context']
                    matched = int(context['matched'])
                    logging.info('CollectionsBusiness.search - context: %s', context)
                    # if something was found, then fill 'limit' key with the true limit
                    if matched:
                        context['limit'] = limit
                logging.debug('\n\nCollectionsBusiness.search - the end\n\n')
        elif method == 'GET':
            for collection in cs:
                result = cls.stac_get_items(providers_json, collection, bbox, datetime=datetime, limit=limit)
                # add the result to the corresponding collection
                result_dict[provider][collection] = result
        else:
            raise BadRequest('Unexpected provider: {}'.format(provider))
    return result_dict
def post_search(cls, providers, bbox, datetime, limit=MAX_LIMIT):
    """Search features on each requested provider via its configured method.

    `providers` is a list of dicts with 'name', 'method', 'collections' and
    'query' keys. Returns {provider_name: {collection_name: feature_collection}}.
    Providers flagged with `filter_mult_collection` are queried once for all
    collections and the result is split per collection; the others are queried
    collection by collection, with pagination when `limit` exceeds MAX_LIMIT.
    """
    logging.info('StacBusiness.post_search\n')
    logging.info('StacBusiness.post_search - MAX_LIMIT: %s\n', MAX_LIMIT)
    logging.info('StacBusiness.post_search - providers: %s', providers)
    logging.info('StacBusiness.post_search - bbox: %s', bbox)
    logging.info('StacBusiness.post_search - datetime: %s', datetime)
    logging.info('StacBusiness.post_search - limit: %s\n', limit)
    providers_json = cls.providers_business.get_providers_json()
    logging.info('StacBusiness.post_search - providers_json: %s\n', providers_json)
    result_dict = {}
    # check if there is a provider that does not exist before requesting features
    for provider in providers:
        if provider['name'] not in providers_json:
            raise BadRequest('Provider `{}` does not exist.'.format(provider['name']))
    for provider in providers:
        logging.info('StacBusiness.post_search - provider:')
        # destructuring dictionary contents into variables
        provider_name, method, collections, query = destructuring_dict(provider, 'name', 'method', 'collections', 'query')
        provider_json = providers_json[provider_name]
        logging.info('StacBusiness.post_search - provider_name: %s', provider_name)
        logging.info('StacBusiness.post_search - method: %s', method)
        logging.info('StacBusiness.post_search - collections: %s', collections)
        logging.info('StacBusiness.post_search - query: %s', query)
        logging.info('StacBusiness.post_search - provider_json: %s\n', provider_json)
        # if there is not a provider inside the dict, then initialize it
        if provider_name not in result_dict:
            result_dict[provider_name] = {}
        if provider_json['filter_mult_collection']:
            logging.info('StacBusiness.post_search - filter_mult_collection == True\n')
            limit_to_search = get_limit_to_search(limit)
            # one request for all collections of this provider
            feature_collection = cls.stac_search(method, provider_json, collections, bbox, datetime, query, 1, limit_to_search)
            # an error has been raised upstream: store it and move on
            if 'status_code' in feature_collection and 'description' in feature_collection:
                result_dict[provider_name] = feature_collection
                continue
            logging.info('StacBusiness.post_search - context: %s', feature_collection['context'])
            # remove `meta` key from dict (per-collection match counters)
            metadata_related_to_collections = feature_collection['context'].pop('meta')
            logging.info('StacBusiness.post_search - metadata_related_to_collections: %s', metadata_related_to_collections)
            # remove `features` key from dict, to split them per collection below
            features = feature_collection.pop('features')
            logging.info('StacBusiness.post_search() - collections:')
            for collection in collections:
                collection_name = collection['name']
                logging.info('StacBusiness.post_searc() - collection_name: %s', collection_name)
                # features and metadata belonging to this collection only
                result = list(filter(lambda f: f['collection'] == collection_name, features))
                metadata = list(filter(lambda f: f['name'] == collection_name, metadata_related_to_collections))
                logging.info('StacBusiness.post_search - metadata: %s', metadata)
                # deep copy so each collection gets its own context dict
                fc_structure = deepcopy(feature_collection)
                fc_structure['features'] = result
                fc_structure['context']['matched'] = metadata[0]['context']['matched']
                fc_structure['context']['returned'] = metadata[0]['context']['returned']
                # add the found collection to the result
                result_dict[provider_name][collection_name] = fc_structure
        else:
            logging.info('StacBusiness.post_search - filter_mult_collection == False\n')
            logging.info('StacBusiness.post_search - collections:')
            for collection in collections:
                collection_name = collection['name']
                logging.info('StacBusiness.post_search - collection_name: %s', collection_name)
                limit_to_search = get_limit_to_search(limit)
                # first: I'm searching by the first page
                result = cls.stac_search(method, provider_json, collection_name, bbox, datetime, query, 1, limit_to_search)
                logging.info('StacBusiness.post_search - context: %s', result['context'])
                # if I've already got all features, then I go out of the loop
                if limit <= MAX_LIMIT or result['context']['matched'] <= MAX_LIMIT:
                    logging.info('StacBusiness.post_search - just one request was did')
                # if there is more features to get, then I search by them
                else:
                    logging.info('StacBusiness.post_search - more requests are required')
                    result = cls.stac_search_by_pagination(
                        result, method, provider_json, collection_name, bbox, datetime, query, limit, limit_to_search
                    )
                # add the found collection to the result
                result_dict[provider_name][collection_name] = result
    return result_dict
def stac_get_items(cls, provider_json, collection, bbox, datetime=None, cloud_cover=None, limit=300):
    """Search a single collection through GET /collections/{id}/items."""
    logging.info('CollectionsBusiness.stac_get_items\n')
    logging.info('CollectionsBusiness.stac_get_items - provider_json: %s', provider_json)
    logging.info('CollectionsBusiness.stac_get_items - collection: %s', collection)
    logging.info('CollectionsBusiness.stac_get_items - bbox: %s', bbox)
    logging.info('CollectionsBusiness.stac_get_items - datetime: %s', datetime)
    logging.info('CollectionsBusiness.stac_get_items - cloud_cover: %s', cloud_cover)
    logging.info('CollectionsBusiness.stac_get_items - limit: %s', limit)
    url = provider_json['url']
    logging.info('CollectionsBusiness.stac_get_items - url: %s', url)
    # a list bbox is flattened into the comma-separated string the API expects
    if isinstance(bbox, list):
        bbox = ",".join(list(map(str, bbox)))
    if not isinstance(bbox, str):
        raise BadRequest(
            '`bbox` field is invalid: `{0}`, it should be a string or list, but its type is {1}.'
            .format(bbox, type(bbox)))
    parts = ['bbox={}'.format(bbox)]
    if datetime:
        # STAC <= 0.7.0 calls the temporal filter `time`; newer versions use `datetime`
        if VersionInfo.parse(provider_json['version']) <= VersionInfo.parse('0.7.0'):
            parts.append('time={}'.format(datetime))
        else:
            parts.append('datetime={}'.format(datetime))
    if cloud_cover:
        parts.append('eo:cloud_cover=0/{}'.format(cloud_cover))
    parts.append('limit={}'.format(limit))
    query = "&".join(parts)
    logging.info('CollectionsBusiness.stac_get_items - query: %s', query)
    try:
        response = StacComposeServices.get_collections_collection_id_items(url, collection, query)
        # post processing to rename fields and add the context field if missing
        response = add_context_field_in_the_feature_collection_if_it_does_not_exist(response, page=1, limit=limit)
        response = rename_fields_from_feature_collection(response)
        return response
    except HTTPException as error:
        logging.debug('CollectionsBusiness.stac_get_items - HTTPException.error: %s', error)
        # best effort: return an empty feature collection describing the error
        return create_new_feature_collection(limit=limit, error=error)
def get_stac_search(cls, provider_json, collection, bbox, datetime, query=None, page=1, limit=MAX_LIMIT):
    """GET /stac/search

    Builds the query string (bbox, collection, optional temporal filter,
    pagination), runs the GET search and post-processes the returned
    feature collection. Raises BadRequest when `bbox` is neither a string
    nor a list.
    """
    logging.info('StacBusiness.get_stac_search\n')
    logging.info('StacBusiness.get_stac_search - provider_json: %s', provider_json)
    logging.info('StacBusiness.get_stac_search - collection: %s', collection)
    logging.info('StacBusiness.get_stac_search - bbox: %s', bbox)
    logging.info('StacBusiness.get_stac_search - datetime: %s', datetime)
    logging.info('StacBusiness.get_stac_search - query: %s', query)
    # fix: this log label was garbled ('post_stac_sget_stac_searchearch()')
    logging.info('StacBusiness.get_stac_search - page: %s', page)
    logging.info('StacBusiness.get_stac_search - limit: %s', limit)
    search_collection_as_property = provider_json['search_collection_as_property']
    logging.info('StacBusiness.get_stac_search - url: %s', provider_json['url'])
    logging.info('StacBusiness.get_stac_search - search_collection_as_property: %s\n', search_collection_as_property)
    parameters = []
    if isinstance(bbox, str):
        parameters.append('bbox={}'.format(bbox))
    elif isinstance(bbox, list):
        # flatten a list bbox into the comma-separated string the API expects
        bbox = ",".join(list(map(str, bbox)))
        parameters.append('bbox={}'.format(bbox))
    else:
        raise BadRequest('`bbox` field is invalid: `{0}`, it should be a string or list, but its type is {1}.'.format(bbox, type(bbox)))
    parameters.append('collections={}'.format(collection))
    # if STAC <= '0.7.0', the temporal filter key is `time`
    if datetime and (VersionInfo.parse(provider_json['version']) <= VersionInfo.parse('0.7.0')):
        parameters.append('time={}'.format(datetime))
    elif datetime:
        # default: STAC >= 0.9.0
        parameters.append('datetime={}'.format(datetime))
    parameters.append('page={}'.format(page))
    parameters.append('limit={}'.format(limit))
    parameters = "&".join(parameters)
    logging.info('StacBusiness.get_stac_search - parameters: %s', parameters)
    response = StacComposeServices.get_stac_search(provider_json, parameters)
    # post processing to add field and rename other ones if it is necessary
    response = add_context_field_in_the_feature_collection_if_it_does_not_exist(response, page=page, limit=limit)
    response = rename_fields_from_feature_collection(response)
    return response
def post_stac_search(cls, provider_json, collections, bbox, datetime=False, query=None, page=1, limit=MAX_LIMIT):
    """POST /stac/search

    Builds the JSON body for a POST search (bbox, pagination, optional
    temporal filter and extra `query`), handling both a list of collection
    dicts and a single collection name. Returns the post-processed feature
    collection, or the raw error dict when the service reports a failure.

    NOTE(review): when `query` is provided and `search_collection_as_property`
    is set, the caller's `query` dict is mutated in place (a 'collection' key
    is added) — confirm callers do not reuse that dict.
    """
    logging.info('StacBusiness.post_stac_search\n')
    logging.info('StacBusiness.post_stac_search - provider_json: %s', provider_json)
    logging.info('StacBusiness.post_stac_search - collections: %s', collections)
    logging.info('StacBusiness.post_stac_search - type(collections): %s', type(collections))
    logging.info('StacBusiness.post_stac_search - bbox: %s', bbox)
    logging.info('StacBusiness.post_stac_search - datetime: %s', datetime)
    logging.info('StacBusiness.post_stac_search - query: %s', query)
    logging.info('StacBusiness.post_stac_search - page: %s', page)
    logging.info('StacBusiness.post_stac_search - limit: %s', limit)
    search_collection_as_property = provider_json['search_collection_as_property']
    logging.info('StacBusiness.post_stac_search - url: %s', provider_json['url'])
    logging.info('StacBusiness.post_stac_search - search_collection_as_property: %s\n', search_collection_as_property)
    data = {
        "bbox": bbox,
        "page": page,
        "limit": limit
    }
    if query is not None:
        data["query"] = query
    # if STAC <= '0.7.0', the temporal filter key is `time`
    if datetime and (VersionInfo.parse(provider_json['version']) <= VersionInfo.parse('0.7.0')):
        data['time'] = datetime
    elif datetime:
        # default: STAC >= 0.9.0
        data['datetime'] = datetime
    # if `collections` is a list of collection, then I join all collections to send them
    if isinstance(collections, list):
        # `collections` is something like this `[{"name": "CBERS4_AWFI_L4_DN"}, ...]`
        # then the `map` function returns a list of collections (strings), for example: `['CBERS4_AWFI_L4_DN', ...]`
        data["collections"] = list(map(lambda c: c['name'], collections))
    # if `collections` is a string, in other words, is just one collection, then I just send it
    elif isinstance(collections, str):
        # if STAC supports just to search collection as property, then add it inside query
        if search_collection_as_property:
            # if `data` does not have `query` field, then initialize it
            if "query" not in data:
                data["query"] = {}
            data["query"]["collection"] = {
                "eq": collections
            }
        # else it searchs as STAC standard
        else:
            data["collections"] = [collections]
    else:
        raise BadRequest('`collections` must be instance of `list` or `str`.')
    logging.info('StacBusiness.post_stac_search - data: %s\n', data)
    response = StacComposeServices.post_stac_search(provider_json, data)
    # an error has been raised upstream: return it untouched
    if 'status_code' in response and 'description' in response:
        return response
    # post processing to rename fields and add the context field if missing
    response = add_context_field_in_the_feature_collection_if_it_does_not_exist(response, page=page, limit=limit)
    response = rename_fields_from_feature_collection(response)
    return response