def get_items(srch: Search, limit: Optional[int]) -> Generator[Tuple[dict, str, bool], None, None]:
    # Request at most `limit` items when a limit is given, otherwise everything.
    if limit:
        items = srch.items(limit=limit)
    else:
        items = srch.items()
    for metadata in items.geojson()["features"]:
        uri, relative = guess_location(metadata)
        yield (metadata, uri, relative)
def get_items(
    srch: Search, limit: Optional[int]
) -> Generator[Tuple[dict, str, bool], None, None]:
    if limit:
        items = srch.items(limit=limit)
    else:
        items = srch.items()
    # Work around a bug in STAC Search that doesn't stop at the limit
    for count, metadata in enumerate(items.geojson()["features"]):
        # Stop at the limit if it's set
        if (limit is not None) and (count >= limit):
            break
        uri, relative = guess_location(metadata)
        yield (metadata, uri, relative)
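A hedged usage sketch for the get_items() generator above, not taken from the original source: it assumes get_items() and its guess_location() helper live in the surrounding module, and the bbox/date values are purely illustrative (they mirror a commented-out example later in this collection).

from satsearch import Search

# Illustrative search; any satsearch.Search works here as long as get_items()
# and guess_location() are importable from the same module.
search = Search(bbox=[-110, 39.5, -105, 40.5], datetime="2018-02-01/2018-02-04")
for metadata, uri, relative in get_items(search, limit=10):
    print(metadata["id"], uri, relative)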
def search_satellite_data(coordinates, cloud_cover_lt, product="landsat"):
    """
    coordinates: bounding box's coordinates
    cloud_cover_lt: maximum cloud cover
    product: landsat, sentinel
    """
    if product == "landsat":
        product = "landsat-8-l1"
    elif product == "sentinel":
        product = "sentinel-2-l1c"

    search = Search(
        bbox=get_tiny_bbox(coordinates),
        query={
            "eo:cloud_cover": {"lt": cloud_cover_lt},
            "collection": {"eq": product},
        },
        sort=[{"field": "eo:cloud_cover", "direction": "asc"}],
    )
    # improvement: filter by date, cloud cover here
    search_items = search.items()
    if not len(search_items):
        exit_cli(print, "No data matched your search, please try different parameters.")
    # return the first result
    item = search_items[0]
    return item
def run(params, url, sleep=None):
    search = Search(api_url=url, **params)
    logger.debug(f"Searching {url}")

    found = search.found()
    logger.debug(f"Total items found: {found}")

    if found < MAX_ITEMS_REQUEST:
        logger.info(f"Making single request for {found} items")
        items = search.items()
        for i, item in enumerate(items):
            resp = SNS_CLIENT.publish(TopicArn=SNS_TOPIC, Message=json.dumps(item._data))
            if (i % 500) == 0:
                logger.debug(f"Added {i+1} items to Cirrus")
            #if resp['StatusCode'] != 200:
            #    raise Exception("Unable to publish")
            if sleep:
                time.sleep(sleep)
        logger.debug(f"Published {len(items)} items to {SNS_TOPIC}")
    else:
        # bisection
        nbatches = 2
        logger.info(
            f"Too many Items for single request, splitting into {nbatches} batches by date range"
        )
        for params in split_request(params, nbatches):
            run(params, url)
def search_landsat_data(coordinates, cloud_cover_lt):
    search = Search(
        bbox=get_tiny_bbox(coordinates),
        query={
            "eo:cloud_cover": {"lt": cloud_cover_lt},
            "collection": {"eq": "landsat-8-l1"},
        },
        sort=[{"field": "eo:cloud_cover", "direction": "asc"}],
    )
    # improvement: filter by date, cloud cover here
    search_items = search.items()
    if not len(search_items):
        exit_cli(print, "No data matched your search, please try different parameters.")
    landsat_item = search_items[0]
    return landsat_item
def search(geometry: dict, datetime: date) -> object:
    logger.info('Searching images')
    kwargs = {
        'intersects': geometry,
        'datetime': datetime.isoformat(),
        'collections': [LANDSAT]
    }
    search = Search(url=URL, **kwargs)
    items = search.items()
    logger.debug('Found: %d', len(items))
    return next(iter(items))
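A hedged call sketch for search() above, not from the original module: it assumes the URL and LANDSAT constants are defined as in the snippet, and the GeoJSON polygon below is illustrative only.

from datetime import date

# Illustrative area of interest; search() turns the date into an ISO string
# and returns the first matching item.
aoi = {
    "type": "Polygon",
    "coordinates": [[[-105.3, 39.9], [-105.1, 39.9], [-105.1, 40.1],
                     [-105.3, 40.1], [-105.3, 39.9]]],
}
item = search(aoi, date(2021, 3, 25))
print(item)  # inspect item.assets afterwards for download hrefs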
def search_scenes(bbox, collection='landsat-8-l1', cloud_cover=(0, 10)):
    search = Search(
        bbox=bbox,
        query={
            'eo:cloud_cover': {'gt': cloud_cover[0], 'lt': cloud_cover[1]},
            'collection': {'eq': collection}
        })
    return search.items()
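A hedged usage sketch for search_scenes(), not part of the original snippet: the bbox values are illustrative, and the 'thumbnail' asset key is assumed to exist on the returned items, as it does in other snippets in this collection.

items = search_scenes(bbox=[-110, 39.5, -105, 40.5])
print('%s scenes found' % len(items))
for item in items:
    # eo:cloud_cover and the thumbnail asset are assumed to be present
    print(item.properties.get('eo:cloud_cover'), item.assets['thumbnail']['href'])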
def retrieveSatelliteImage(min_lon, min_lat, max_lon, max_lat):
    '''retrieve a satellite image with min and max coordinates as corners'''
    start_date = '2021-03-25T00:00:00Z'
    end_date = date.today().strftime('%Y-%m-%dT00:00:00Z')
    search = Search(bbox=[min_lon, min_lat, max_lon, max_lat],
                    datetime=start_date + '/' + end_date,
                    url='https://earth-search.aws.element84.com/v0')
    print(search)
    items = search.items(limit=1)
    keys = [k for i in items for k in i.assets]
    filename = items[0].download(keys[0], filename_template='satellite_images/image')
    return filename
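A hedged call sketch, not from the original source: the coordinates below are illustrative and simply show the expected corner order (min_lon, min_lat, max_lon, max_lat).

# Downloads the first asset of one matching scene into satellite_images/.
path = retrieveSatelliteImage(-105.30, 39.96, -105.18, 40.05)
print('downloaded:', path)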
def search_image(date, bb, prop):
    """Searches for a satellite image for the given bounding box, date and properties.

    :parameter: single date, bounding box as list, properties as string
    :return: satstac.Item object with the lowest cloud cover for the given date"""
    image = []
    # search image for the given date or period of time,
    # always takes the first image
    search = Search(bbox=bb,
                    datetime=date,
                    property=[prop],
                    sort=[{'field': 'eo:cloud_cover', 'direction': 'asc'}])
    items = search.items()
    # filter for Landsat images since Sentinel doesn't work and the
    # collection option for sat-search seems to be broken
    workaround = [str(item) for item in items]
    counter = 0
    for z in workaround:
        if 'S2' in z:
            counter += 1
        else:
            image.append(items[counter])
            break
    # check if an image was found
    assert len(image) == 1, 'No images found for the given parameters. ' \
                            'Please try new ones'
    return items[counter]
def post(self):
    body = request.get_json()
    geom = {
        "type": "Polygon",
        "coordinates": [[[-66.3958740234375, 43.305193797650546],
                         [-64.390869140625, 43.305193797650546],
                         [-64.390869140625, 44.22945656830167],
                         [-66.3958740234375, 44.22945656830167],
                         [-66.3958740234375, 43.305193797650546]]]
    }
    print(body)
    cloud = "eo:cloud_cover<" + "5"  # body['cloud']
    time = "2018-02-01/2018-02-10"  # body["time"]
    search = Search(intersects=geom, time=time, property=[cloud])
    items = search.items()
    print(search.found())
    for item in items:
        print(item.assets["thumbnail"]["href"])
    return 200
def search_image(date, bb, prop):
    """Searches for a satellite image for the given bounding box, date and properties.

    :parameter: single date or range of days, bounding box as list and properties as string
    :return: list containing a satstac.Item object with the lowest cloud cover
             for the given date"""
    image = []
    # search image for the given date or period of time, always takes the first image
    search = Search(bbox=bb,
                    datetime=date,
                    property=[prop],
                    sort=[{'field': 'eo:cloud_cover', 'direction': 'desc'}])
    items = search.items()
    image.append(items[-1])
    assert len(image) == 1, 'No images found for the given parameters. Please try new ones'
    return image
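A hedged call sketch for search_image() above, not from the original source: the bounding box, date range and property string reuse the values from the standalone script further down in this collection.

bbox = [8.66744, 49.41217, 8.68465, 49.42278]
images = search_image('2018-06-01/2018-08-04', bbox, 'eo:cloud_cover<5')
print(images[0])  # the single satstac.Item returned in the list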
def get_satellite_image(coords):
    """ Generate an image given min and max gps coordinate pairs """
    # set the date parameters for searching
    start_date = '2021-04-07T00:00:00Z'
    end_date = date.today().strftime('%Y-%m-%dT00:00:00Z')
    # search for a satellite image match
    search = Search(bbox=coords,
                    datetime=start_date + '/' + end_date,
                    url='https://earth-search.aws.element84.com/v0')
    # only record the latest one
    items = search.items(limit=1)
    # try to find a better way to do this
    keys = [k for i in items for k in i.assets]
    # download the latest one
    filename = items[0].download(keys[0], filename_template='satellite_images/image')
    return filename
from satsearch import Search
import sys
import numpy as np
import rasterio as rio
from itertools import product
from rasterio import windows
import matplotlib.pyplot as plt
import multiprocessing as mp

print("Number of processors: ", mp.cpu_count())

search = Search(bbox=[8.66744, 49.41217, 8.68465, 49.42278],
                datetime='2018-06-01/2018-08-04',
                property=["eo:cloud_cover<5"])
items = search.items()
item = items[6]


def search_image(date, bb, prop):
    """Searches for a satellite image for the given bounding box, date and properties.

    :parameter: single date or range of days, bounding box as list and properties as string
    :return: list containing a satstac.Item object with the lowest cloud cover
             for the given date"""
    image = []
    # search image for the given date or period of time, always takes the first image
    search = Search(bbox=bb,
                    datetime=date,
                    property=[prop],
                    sort=[{
maxdate1 = f'{(t+dt).year:04}-{(t+dt).month:02}-{(t+dt).day:02}'
geojson = mapping(shape(geojson).buffer(1e-5))
query = {
    "url": "https://earth-search.aws.element84.com/v0",
    "intersects": copy.copy(geojson),
    "query": {
        "eo:cloud_cover": {
            "lte": args.maxclouds,
            "gte": args.minclouds,
        }
    },
    "datetime": f"{mindate1}/{maxdate1}",
    "sort": [
        {
            "field": "datetime",
            "direction": ">",
        },
    ],
    "collections": ["sentinel-s2-l2a"],
    "limit": args.limit,
}
search = Search(**query)
if search.found() > 0:
    print(f'*date={t} points={len(ps)}')
    items = search.items(limit=args.limit)
    items.save(f'{year:04}_{month:02}_{day:02}.result')
else:
    print(f' date={t} points={len(ps)}')
search = Search.search(property=["eo:cloud_cover<10", "collection=landsat-8-l1"])
print('%s items found' % search.found())

# or use collection shortcut
search = Search.search(collection='landsat-8-l1', property=["eo:cloud_cover<10"])
print('%s items found' % search.found())

#search = Search(bbox=[-110, 39.5, -105, 40.5],
#                datetime='2018-02-01/2018-02-04',
#                property=["eo:cloud_cover<5"])
#print('%s items' % search.found())

items = search.items()
print('%s items' % len(items))
print('%s collections' % len(items._collections))
print(items._collections)

#for item in items:
#    print(item.properties)

items[1].save('test.json')
items2 = Item.load('test.json')
print(items2.summary(['date', 'id', 'eo:cloud_cover']))

# download a specific asset from all items and put in a directory by date in 'downloads'
filenames = items.download('MTL', path='downloads/${date}')
def query_rf(features,
             refresh_token,
             limit=800,
             minclouds=0.0,
             maxclouds=20.0,
             mindate=['1307-10-13'],
             maxdate=['2038-01-19'],
             scale=None,
             original_shape=False):
    limit = min(limit, 800)

    def convert_and_scale(f):
        tmp = shapely.geometry.shape(f.get('geometry'))
        if scale is not None:
            tmp = shapely.affinity.scale(tmp, scale, scale)
        return tmp

    feature = list(map(convert_and_scale, features.get('features')))
    shape = shapely.ops.cascaded_union(feature)
    if original_shape:
        scale = None
        original_shape = shapely.ops.cascaded_union(
            list(map(convert_and_scale, features.get('features'))))
        aoi_shape = original_shape
    else:
        aoi_shape = shape

    sentinel_scenes = {
        'results': [],
        'aoi': shapely.geometry.mapping(aoi_shape)
    }
    rf_shape = shapely.geometry.mapping(shape)

    for (mindate1, maxdate1) in zip(mindate, maxdate):
        geo_filter = {
            "url": "https://earth-search.aws.element84.com/v0",
            "intersects": copy.copy(rf_shape),
            "query": {
                "eo:cloud_cover": {
                    "lte": maxclouds,
                    "gte": minclouds,
                }
            },
            "datetime": f"{mindate1}/{maxdate1}",
            "sort": [
                {
                    "field": "datetime",
                    "direction": ">",
                },
                # {
                #     "field": "eo:cloud_cover",
                #     "direction": "<",
                # },
            ],
            "collections": ["sentinel-s2-l2a"],
            "limit": limit,
        }
        search = Search(**geo_filter)
        for item in list(search.items(limit=limit))[0:limit]:
            tiles = item.assets.get("B01").get("href")
            tiles = tiles[tiles.find("/tiles") + 1:]  # get "/tiles/..."
            tiles = tiles[:-8]  # remove "/B01.jp2" from the end
            if tiles.endswith("/R60m"):
                tiles = tiles[:-5]
            result = {
                "dataFootprint": item.geometry,
                "createdAt": item.properties.get("datetime"),
                "name": item.properties.get("sentinel:product_id"),
                "sceneMetadata": {
                    "cloudyPixelPercentage": item.properties.get("eo:cloud_cover"),
                    "path": tiles,
                },
            }
            sentinel_scenes['results'].append(result)

    return sentinel_scenes
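A hedged invocation sketch for query_rf() above, not from the original source: the FeatureCollection reuses the polygon from the Flask handler earlier in this collection, refresh_token is passed as None because the visible code does not use it, and the date range is illustrative.

aoi = {
    "type": "FeatureCollection",
    "features": [{
        "type": "Feature",
        "properties": {},
        "geometry": {
            "type": "Polygon",
            "coordinates": [[[-66.3958740234375, 43.305193797650546],
                             [-64.390869140625, 43.305193797650546],
                             [-64.390869140625, 44.22945656830167],
                             [-66.3958740234375, 44.22945656830167],
                             [-66.3958740234375, 43.305193797650546]]],
        },
    }],
}
scenes = query_rf(aoi, refresh_token=None, limit=10,
                  mindate=['2020-06-01'], maxdate=['2020-06-30'])
print(len(scenes['results']), 'Sentinel-2 scenes found')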
def downloading_scenes(self):
    """
    Sends an API request to download scenes based on the selected parameters.
    """
    self.clearing_landsat8_category()
    self.clear_filekeys()
    if not os.path.exists(self.dlg.folderPath_lineEdit.text()):
        self.dlg.logWindow.appendPlainText(
            "[" + str(datetime.datetime.now().strftime("%H:%M:%S")) + "]"
            + " Please enter a valid directory path!")
        return None

    SATTELITE_NAME = str(self.dlg.satelliteName_comboBox.currentText())
    searching_collection_name = None
    if SATTELITE_NAME == "Landsat-8":
        searching_collection_name = "landsat-8-l1"
    elif SATTELITE_NAME == "Sentinel-2":
        searching_collection_name = "sentinel-2-l1c"

    CLOUD_FROM = str(self.dlg.cloudFrom_spinBox.value())
    CLOUD_TO = str(self.dlg.cloudTo_spinBox.value())
    DATE_FROM = str(self.dlg.dateEdit.date().toPyDate())
    DATE_TO = str(self.dlg.dateEdit_2.date().toPyDate())

    query = {'eo:cloud_cover': {'lte': CLOUD_TO, 'gte': CLOUD_FROM}}
    if searching_collection_name:
        query['collection'] = {'eq': searching_collection_name}

    intersects_geojson_data = self.buildGeoJSON()
    date_param_string = f"{DATE_FROM}/{DATE_TO}"

    if searching_collection_name == "landsat-8-l1":
        self.check_landsat8_filekeys()
    elif searching_collection_name == "sentinel-2-l1c":
        self.check_sentinel2_filekeys()

    items = []
    if searching_collection_name == "landsat-8-l1":
        landsat_tiers = self.checking_landsat8_category()
        for tier in landsat_tiers:
            query['landsat:tier'] = {"eq": tier}
            if intersects_geojson_data is not None:
                search = Search(intersects=intersects_geojson_data,
                                time=date_param_string, query=query)
            else:
                search = Search(time=date_param_string, query=query)
            items += search.items()
    else:
        if intersects_geojson_data is not None:
            search = Search(intersects=intersects_geojson_data,
                            time=date_param_string, query=query)
        else:
            search = Search(time=date_param_string, query=query)
        items = search.items()

    PATH = str(self.dlg.folderPath_lineEdit.text())
    self.dlg.logWindow.appendPlainText(
        "[" + str(datetime.datetime.now().strftime("%H:%M:%S")) + "]"
        + " Files will be downloaded to the directory: " + PATH)
    self.dlg.logWindow.appendPlainText(
        "[" + str(datetime.datetime.now().strftime("%H:%M:%S")) + "]"
        + " Scenes queued for download: " + str(len(items)))
    self.dlg.logWindow.appendPlainText(
        "[" + str(datetime.datetime.now().strftime("%H:%M:%S")) + "]"
        + " Bands (files) to download: " + ", ".join(FILEKEYS))

    self.dlg.stopDownloadingButton.setEnabled(True)
    self.dlg.interruptingButton.setEnabled(True)
    self.dlg.downloadScenesButton.setEnabled(False)

    self.worker = DownloadWorker()
    self.worker.scenes = items
    self.worker.filekeys = FILEKEYS
    self.worker.path = PATH
    self.worker.work_started.connect(self.work_is_starting)
    self.worker.data_downloaded.connect(self.download_ready)
    self.worker.work_finished.connect(self.work_ready)
    try:
        self.worker.start()
    except Exception:
        self.stop_worker()
def get_files(self, instance):
    bands = instance.bands

    # geocoding
    # place = self.context['location']
    # response = requests.get(f"https://nominatim.openstreetmap.org/search?q={place}&format=json&polygon_geojson=1&addressdetails=0&limit=1")
    # json_object = response.json()
    # bb = json_object[0]['boundingbox']
    # bbx = [float(x) for x in bb]

    # aws configuration
    url = 'https://earth-search.aws.element84.com/v0'  # URL to Sentinel 2 AWS catalog
    collection = 'sentinel-s2-l2a-cogs'

    # aws search parameter
    startDate = self.context['date_from']
    endDate = self.context['date_to']
    # location = [
    #     11.40020,
    #     53.63612,
    #     11.44569,
    #     53.62385
    # ]

    bbox_search = Search(
        bbox=self.context['location'],
        datetime=startDate + "/" + endDate,
        query={
            'eo:cloud_cover': {'lt': 50},
            'sentinel:data_coverage': {'gt': 80}
        },
        collections=[collection],
        url=url,
        sortby='+properties.eo:cloud_cover',
    )

    items = bbox_search.items()
    downloads = {}
    for i, item in enumerate(items):
        data = {}
        data['Product ID'] = item.properties["sentinel:product_id"]
        data['Preview'] = item.asset("thumbnail")["href"]
        data['Date'] = item.properties["datetime"]
        data['Cloud cover'] = item.properties["eo:cloud_cover"]
        data['Latitude band'] = item.properties["sentinel:latitude_band"]
        data['Grid'] = item.properties["sentinel:grid_square"]
        data['UTM Zone'] = item.properties["sentinel:utm_zone"]
        data['Sequence'] = item.properties["sentinel:sequence"]
        data['Projection'] = item.properties["proj:epsg"]
        data['Data coverage'] = item.properties["sentinel:data_coverage"]
        for band in bands.split(','):
            data[band] = item.asset(band)["href"]
        downloads[i] = data

    df = pd.DataFrame.from_dict(downloads, orient='index')
    df = df.drop_duplicates('Grid')
    results = df.to_dict(orient='index')
    return results
except:
    print("Tile size must be integer values!")
    sys.exit(1)

# check how much time the process took
time_start = time()

# search and download satellite images
search1 = Search(
    bbox=[str(long_left), str(lat_left), str(long_right), str(lat_right)],
    datetime=date1,
    property=["eo:cloud_cover<5"])
items1 = search1.items()

search2 = Search(
    bbox=[str(long_left), str(lat_left), str(long_right), str(lat_right)],
    datetime=date2,
    property=["eo:cloud_cover<5"])
items2 = search2.items()

# Loop through the items list and search for a landsat scene,
# since sentinel scenes throw an error message
for item in items1:
    item_prop = item.properties["collection"]
    if item_prop.startswith("landsat"):
        item1_band4_url = item.assets["B4"]["href"]  # red