def get_sentinel(**kwargs):
    """Download overlapping Sentinel-2 imagery and optionally compute NDSI.

    Keyword Args:
        image_date: target acquisition date.
        figdir: root directory holding the footprint file and outputs.
        footprint: footprint file name (GeoJSON) relative to ``figdir``.
        search_window: search tolerance around ``image_date``.
        chooser: interactive chooser passed to the image search.
        ndsi: when truthy, compute NDSI on the downloaded scene.
        clip: when truthy, clip the NDSI output to the footprint geometry.
        reproject: target projection for the NDSI product.
        ndsi_threshold: NDSI threshold value.
    """
    base_dir = kwargs.get("figdir")
    footprint_file = path.join(base_dir, kwargs.get("footprint"))
    aoi = read_geojson(footprint_file)

    scene_path = get_sentinel_image(
        aoi,
        kwargs.get("image_date"),
        kwargs.get("search_window"),
        base_dir,
        choose=kwargs.get("chooser"),
    )

    if not kwargs.get("ndsi"):
        return

    # The clip geometry is read from the same footprint file when requested.
    clip_src = fiona.open(footprint_file) if kwargs.get("clip") else None
    compute_ndsi(
        scene_path,
        projection=kwargs.get("reproject"),
        threshold=kwargs.get("ndsi_threshold"),
        clip=clip_src,
    )
def getS1Data(geojson, max_size):
    """Search SciHub for Sentinel-1 GRD/IW products of the last day over an AOI.

    Keeps only products whose size is below ``max_size`` bytes.

    :param geojson: path to a GeoJSON file with the search footprint.
    :param max_size: maximum accepted product size in bytes.
    :return: list of product UUIDs that passed the size filter.
    """
    # search by polygon, time, and SciHub query keywords
    footprint = geojson_to_wkt(read_geojson(geojson))
    date = time.strftime("%Y%m%d")
    # NOTE(review): naive "yesterday" — breaks across month boundaries.
    yestdate = str(int(date) - 1)
    products = api.query(footprint, (yestdate, date),
                         platformname='Sentinel-1',
                         producttype='GRD',
                         sensoroperationalmode='IW')
    results = []
    if len(products) == 0:
        print("No files found at date: " + date)
        quit()
    print("Found", len(products), "Sentinel-1 images.")
    products_df = api.to_dataframe(products).sort_values('size', ascending=False)
    for uuid in products_df.index:
        # Fetch the OData record once per product (the original fetched it
        # twice: once for the size and once for the title).
        odata = api.get_product_odata(uuid)
        product_size = int(odata["size"])
        if product_size < max_size:
            results.append(uuid)
            verdict = "|ADDED|"
        else:
            verdict = "|TOO BIG|"
        # BUG FIX: the original used Python 2 `print` statements here, a
        # SyntaxError under Python 3 (the rest of the function already uses
        # print()).  `//` preserves the Py2 integer division of the original.
        print("Name:", odata["title"], "size:",
              str(product_size // 1000000), "MB.", verdict)
    return results
def ObtainProduct_sentinel(JsonFile, Sdate, Edate, Cldmin, Cldmax):
    """Query SciHub for Sentinel-2 scenes over an AOI within date/cloud limits.

    :param JsonFile: name of the GeoJSON file of the area of interest.
    :param Sdate: start date (e.g. 20150101, YYYYMMDD).
    :param Edate: end date (e.g. 20160101, YYYYMMDD).
    :param Cldmin: minimum cloud cover percentage.
    :param Cldmax: maximum cloud cover percentage.
    :return: dict with lists 'Pr_Id', 'Datetime', 'Geometry', 'Identifier'.
    """
    CldCoverrange = '[' + str(Cldmin) + ' TO ' + str(Cldmax) + ']'
    footprints = geojson_to_wkt(read_geojson(JsonFile))
    products = api.query(footprints, (Sdate, Edate),
                         platformname='Sentinel-2',
                         cloudcoverpercentage=CldCoverrange)
    a = api.to_geodataframe(products)
    # Collect all the data available and its date and product ID
    Result = {'Pr_Id': [], 'Datetime': [], 'Geometry': [], 'Identifier': []}
    if len(products) == 0:
        # BUG FIX: Python 2 print statements converted to print() calls.
        print('No scene available in given condition')
    else:
        print('Found ' + str(len(products)) + ' Scene\n',
              "Here is your list of Dates available and its product id")
        # BUG FIX: xrange() does not exist in Python 3; range() + the loop
        # index replaces the separate `counter` variable (they were equal).
        for i in range(len(a)):
            Result['Pr_Id'].append(a['uuid'][i])
            Result['Datetime'].append(a['beginposition'][i])
            Result['Geometry'].append(a['geometry'][i])
            Result['Identifier'].append(a['identifier'][i])
            print(i, ' ==> ', a['beginposition'][i], ' ==> ',
                  a['uuid'][i], ' ==> ', a['cloudcoverpercentage'][i])
    return Result
def Sen2Download(self, dprofile):
    """Search SciHub for recent Sentinel-2 L1C products over the profile's
    footprint and hand them to DownloadProducts.

    :param dprofile: download profile carrying credentials, a day window,
        the footprint GeoJSON path, the output directory and concurrency.
    """
    username = dprofile.username
    password = dprofile.password
    daysdiff = dprofile.daysdiff
    shape_file = dprofile.shape_file_path

    # BUG FIX: the log message said "Sentinel-1" although this method
    # queries Sentinel-2 products.
    logger.info(f'Sentinel-2 Downloads starting with dprofile = {dprofile}')

    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')

    # shapefile (GeoJSON) to WKT
    footprint = geojson_to_wkt(read_geojson(shape_file))

    # dates to search: the last `daysdiff` days
    end_date = datetime.datetime.now()
    start_date = end_date - datetime.timedelta(days=daysdiff)

    # Search for data
    products = api.query(footprint,
                         date=(start_date, end_date),
                         platformname='Sentinel-2',
                         producttype='S2MSI1C',
                         cloudcoverpercentage=(0, 30))

    # BUG FIX: `self.DownloadProducts(self, products, dprofile)` passed
    # `self` twice (once bound, once explicitly).
    self.DownloadProducts(products, dprofile)
def iterate_geojson_job(directory, directory2, api):
    """Walk GeoJSON footprints under `directory`, create a per-scene output
    folder under `directory2`, and query SciHub for Sentinel-1 products of
    the last two days over each footprint.

    :param directory: root directory whose subdirectories hold .geojson files.
    :param directory2: root of the output tree (S1A/... and S1B/... folders).
    :param api: authenticated sentinelsat SentinelAPI instance.
    """
    for subdir in listdir_nohidden(directory):
        for filename in os.listdir(subdir):
            if not filename.endswith(".geojson"):
                continue
            filename2 = os.path.join(subdir, filename)
            subdir_min = subdir.replace(directory, '')
            filename_min = filename.replace('.geojson', '')
            # For Sentinel 1 data: pick the platform folder by directory name.
            if re.search('s1a', subdir_min, re.IGNORECASE):
                scene_dir = os.path.join(directory2, 'S1A', 'S1A_' + filename_min)
            elif re.search('s1b', subdir_min, re.IGNORECASE):
                scene_dir = os.path.join(directory2, 'S1B', 'S1B_' + filename_min)
            else:
                # BUG FIX: `scene_dir` was left unbound (or stale from the
                # previous iteration) when the directory matched neither
                # s1a nor s1b, raising NameError / writing to the wrong dir.
                continue
            # If GeoJSON folder does not exist, create it.
            if not os.path.exists(scene_dir):
                os.makedirs(scene_dir)
                print(scene_dir + " folder created.")
            footprint = geojson_to_wkt(read_geojson(filename2))
            products = api.query(footprint,
                                 date=("NOW-2DAY", "NOW"),
                                 platformname='Sentinel-1')
def download_scenes(period):
    """Build the Sentinel-1 mosaic for `period`: query, download, preprocess
    and composite all matching GRD scenes.

    :param period: object with `date_from` / `date_to` attributes.
    """
    date_from = period.date_from
    date_to = period.date_to

    # Check if result has already been done
    scene_filename = 's1_{dfrom}_{dto}.tif'.format(
        dfrom=period.date_from.strftime('%Y%m'),
        dto=period.date_to.strftime('%Y%m'))
    scene_path = os.path.join(RESULTS_PATH, scene_filename)
    if os.path.exists(scene_path):
        print(
            "Sentinel-1 mosaic for period {}-{} already done:".format(
                date_from, date_to), scene_path)
        return

    # Prepare API client for download
    api = SentinelAPI(settings.SCIHUB_USER, settings.SCIHUB_PASS,
                      settings.SCIHUB_URL)

    # Query scenes
    footprint = geojson_to_wkt(read_geojson(AOI_PATH))
    products = api.query(footprint,
                         date=(date_from, date_to),
                         platformname='Sentinel-1',
                         producttype='GRD',
                         polarisationmode='VV VH',
                         orbitdirection='ASCENDING')

    for k, p in products.items():
        print((k, p['summary']))

    os.makedirs(S1_RAW_PATH, exist_ok=True)

    # Filter already downloaded products
    products_to_download = {
        k: v
        for k, v in products.items() if not os.path.exists(
            os.path.join(S1_RAW_PATH, '{}.zip'.format(v['title'])))
    }

    # Download products
    results = api.download_all(products_to_download,
                               directory_path=S1_RAW_PATH)
    products = list(products.values())

    # Process the images of each product
    with mp.Pool(settings.S1_PROC_NUM_JOBS) as pool:
        pool.map(process_product, products)

    # Create a median composite from all images of each band, generate extra
    # bands and concatenate results into a single multiband image.
    superimpose(products)
    median(products, period)
    generate_vvvh(period)
    concatenate_results(period)
    clip_result(period)
    clean_temp_files(period)
def read_band(filename, get_coordinates=False):
    """
    Crop the file `filename` with a polygon mask.

    Images with spatial resolution = 10m are cropped by real shape of chosen
    location. Images with spatial resolution = 20m or 60m cover a rectangle
    bigger than chosen location avoiding data loss. Transformed to reflectance
    dividing by 10000, only for Sentinel-2 bands.

    :param filename: Input filename (jp2, tif).
    :param get_coordinates: when True, return (image, affine transform)
        instead of the image alone.
    """
    # Pick the GeoJSON mask by band: 10m bands use the exact AOI shape,
    # 20m/60m bands use the enclosing rectangle.
    if any(band in filename
           for band in ['B02', 'B03', 'B04', 'B08', 'classification', 'TCI']):
        spatial_resolution = 10
        geojson = paths.main_geojson
    elif any(band in filename for band in ['B01', 'B09']):
        spatial_resolution = 60
        geojson = paths.rectangle_geojson
    else:
        spatial_resolution = 20
        geojson = paths.rectangle_geojson
    footprint = read_geojson(geojson)

    # load the raster, mask it by the polygon and crop it
    with rasterio.open(filename) as src:
        # NOTE(review): `dcs=` looks like a typo for a destination-CRS
        # keyword of project_shape — confirm against its signature.
        shape = project_shape(footprint['features'][0]['geometry'],
                              dcs=src.crs)
        out_image, out_transform = mask.mask(src, shapes=[shape], crop=True)
    if any(band in filename for band in ['CLD', 'classification']):
        out_image = out_image[0, :, :]
    elif 'TCI' in filename:
        # TCI is RGB: move the band axis last
        out_image = np.rollaxis(out_image, 0, 3)
    else:
        # Sentinel-2 reflectance bands are scaled by 10000.
        out_image = out_image[0, :, :] / 10000
        out_image[np.isnan(out_image) | np.isinf(out_image)] = 0
    # Upsample 20m/60m bands to the 10m grid, then crop to the AOI slices.
    if spatial_resolution == 20:
        out_image = crop_aux(
            np.repeat(np.repeat(out_image, 2, axis=0), 2, axis=1),
            paths.slices20)
    elif spatial_resolution == 60:
        out_image = crop_aux(
            np.repeat(np.repeat(out_image, 6, axis=0), 6, axis=1),
            paths.slices60)
    if get_coordinates:
        out_image = (out_image, out_transform)
    return out_image
def getSentinelData(productID="", footprint=footprint, begin=start_date,
                    end=end_date, platform=platform_name,
                    clouds=cloud_cover_percentage):
    """Download Sentinel products: one product by UUID, or everything
    matching a footprint/date/platform/cloud-cover search.

    NOTE(review): the default argument values are module-level globals,
    evaluated once at import time.

    :param productID: product UUID; when given, download it and return.
    :param footprint: path to the GeoJSON footprint file.
    :param begin: search start date.
    :param end: search end date.
    :param platform: platform name, e.g. 'Sentinel-2'.
    :param clouds: cloud cover percentage range.
    """
    if productID:
        api.download(productID)
        return
    fp = geojson_to_wkt(read_geojson(footprint))
    # BUG FIX: the query used `footprint` (the GeoJSON file *path*) instead
    # of the WKT string `fp` computed on the previous line, which was unused.
    products = api.query(fp,
                         date=(begin, end),
                         platformname=platform,
                         cloudcoverpercentage=clouds)
    api.download_all(products)
    return
def sentinel1_tile_download(file, username, password, tilepath):
    """Download Sentinel-1 SLC ascending products intersecting a footprint.

    :param file: path to the GeoJSON footprint file.
    :param username: SciHub user name.
    :param password: SciHub password.
    :param tilepath: directory where downloaded products are stored.
    """
    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')
    # 60-day window; currently unused because the date-filtered query below
    # is commented out.
    td = timedelta(days=60)
    endDate = datetime.now()
    startDate = endDate - td
    footprint = geojson_to_wkt(read_geojson(file))
    print(footprint)
    #products = api.query(footprint,
    #                     date=(startDate, endDate), platformname='Sentinel-1')
    products = api.query(footprint,
                         producttype='SLC',
                         orbitdirection='ASCENDING')
    # download all results from the search
    # BUG FIX: sentinelsat's keyword is `directory_path`; `directorypath`
    # raises a TypeError.
    api.download_all(products, directory_path=tilepath)
    return
def search(self, start='NOW-7DAYS', end='NOW'):
    """Query the hub for Sentinel-1 GRD products over the bundled NRW extent.

    :param start: start of the search window (hub date expression).
    :param end: end of the search window (hub date expression).
    :return: dict of matching products, or {} when the query fails.
    """
    self._logger.info('Searching for new data sets')
    # The search extent ships next to this module as nrw.geojson.
    geojson_path = os.path.join(os.path.dirname(__file__), "nrw.geojson")
    area_wkt = geojson_to_wkt(read_geojson(geojson_path))
    try:
        return self.__api.query(area=area_wkt,
                                initial_date=start,
                                end_date=end,
                                platformname='Sentinel-1',
                                producttype='GRD')
    except SentinelAPIError:
        self._logger.error('There was an error searching for data sets',
                           exc_info=True)
        return {}
def main(areapath, outfolder, start_date, end_date):
    """Search SciHub for Sentinel-2 scenes over an AOI and download them all.

    :param areapath: path to the AOI GeoJSON file.
    :param outfolder: directory where products are downloaded.
    :param start_date: search start date.
    :param end_date: search end date.
    """
    # connect to the API
    # SECURITY: credentials are hard-coded here; move them to environment
    # variables or a configuration file.
    api = SentinelAPI('javy', 'Javy9289', 'https://scihub.copernicus.eu/dhus')

    # search by polygon, time, and Hub query keywords
    footprint = geojson_to_wkt(read_geojson(areapath))
    products = api.query(footprint,
                         date=(start_date, end_date),
                         platformname='Sentinel-2')

    # download all results from the search
    # BUG FIX: the original call contained a stray extra comma
    # (`download_all(products, outfolder, , checksum=False)`), a SyntaxError.
    api.download_all(products, outfolder, checksum=False)
def download():
    """Search for one low-cloud Sentinel-2 scene over a fixed footprint and
    download it into SAVE_FOLDER."""
    # search by polygon
    aoi_wkt = geojson_to_wkt(
        read_geojson(r"C:\Users\Ridene Safa\Desktop\task\task2\map.geojson"))
    print(aoi_wkt)
    print("Searching")
    search_results = api.query(
        aoi_wkt,
        date=('20200204', '20210206'),
        platformname='Sentinel-2',
        cloudcoverpercentage=(0, 30),
        #filename="*TCI_10m*",
        limit=1)
    print(len(search_results))
    print("Start downloading...")
    for product_id in search_results:
        #api.get_product_odata(product_id)
        api.download(product_id, SAVE_FOLDER)
def execute(self, context):
    """Operator entry point: query DHuS for products, log a summary and push
    the result to XCom.

    :param context: task execution context (expects context['task_instance']).
    :return: dict of products, or False when the GeoJSON file cannot be read.
    """
    log.info(context)
    log.info("#################")
    log.info("## DHUS Search ##")
    log.info('API URL: %s', self.dhus_url)
    log.info('API User: %s', self.dhus_user)
    #log.info('API Password: %s', self.dhus_pass)
    log.info('Start Date: %s', self.startdate)
    log.info('End Date: %s', self.enddate)
    log.info('Filter Max: %s', self.filter_max)
    log.info('Order By: %s', self.order_by)
    log.info('GeoJSON: %s', self.geojson_bbox)
    log.info('Keywords: %s', self.keywords)
    log.info('Now is: {}'.format(datetime.now()))
    log.info('6 hours ago was: {}'.format(datetime.now() - timedelta(hours=6)))
    print("Execute DHUS Search.. ")

    # search products
    api = SentinelAPI(self.dhus_user, self.dhus_pass, self.dhus_url)
    try:
        footprint = geojson_to_wkt(read_geojson(self.geojson_bbox))
    # BUG FIX: a bare `except:` also swallows SystemExit/KeyboardInterrupt;
    # catch Exception and keep the traceback in the log.
    except Exception:
        log.error('Cannot open GeoJSON file: {}'.format(self.geojson_bbox),
                  exc_info=True)
        return False
    products = api.query(area=footprint,
                         date=(self.startdate, self.enddate),
                         order_by=self.order_by,
                         limit=self.filter_max,
                         **self.keywords)
    log.info("Retrieving {} products:".format(len(products)))
    products_summary = "\n"
    for key, product in products.items():
        products_summary += 'ID: {}, {}\n'.format(key, product['summary'])
    log.info(products_summary)
    context['task_instance'].xcom_push(key='searched_products', value=products)
    return products
def download_job(directory, directory2, api):
    """For every GeoJSON footprint in `directory`, query Sentinel-2 products
    of the last hour, dump the result table to a timestamped CSV inside the
    matching folder under `directory2`, and download all products.

    :param directory: directory containing .geojson footprint files.
    :param directory2: root of the per-footprint output folders.
    :param api: authenticated SentinelAPI instance.
    """
    for filename in os.listdir(directory):
        if not filename.endswith(".geojson"):
            continue
        filename2 = os.path.join(directory, filename)
        print(filename2)
        # BUG FIX: `directory2 + filename` concatenated without a path
        # separator; os.path.join places the CSV in the intended folder.
        os.chdir(os.path.join(directory2, filename))
        footprint = geojson_to_wkt(read_geojson(filename2))
        products = api.query(footprint,
                             date=("NOW-1HOUR", "NOW"),
                             platformname='Sentinel-2')
        products_df = api.to_dataframe(products)
        now_time = datetime.datetime.now()
        # Write directly to the timestamped name instead of writing
        # "testy.csv" and renaming it afterwards (racy with parallel jobs).
        # NOTE(review): str(datetime) contains ':' characters, which are not
        # valid in Windows file names — confirm target platform.
        products_df.to_csv(str(now_time) + '.csv')
        api.download_all(products)
        print('Files last updated on ' + str(now_time))
def read_aoi(self, aoi):
    """Read AOI from GeoJSON file or directly from WKT.

    :param aoi: area of interest (WKT string or path to a GeoJSON file)

    :return str: WKT string (None when sentinelsat is unavailable)
    """
    # BUG FIX: use a raw string for the regex — '\s' inside a normal string
    # is an invalid escape sequence (DeprecationWarning, SyntaxError in
    # future Python versions).
    if re.search(r'Polygon\s*(.*)', aoi, re.IGNORECASE):
        # Fedeo is very pedantic, polygon must be uppercase
        return aoi.upper().replace('POLYGON ', 'POLYGON')

    try:
        # could be replaced by geojson + shapely
        from sentinelsat.sentinel import geojson_to_wkt, read_geojson
    except ImportError as e:
        Logger.critical("{} processor: {}".format(self.identifier, e))
        return None

    # GeoJSON
    return geojson_to_wkt(read_geojson(aoi))
def sat_query_job(footprint, api, satnum, tile_num):
    """Build a raw SciHub query string from the global `df` config plus
    per-satellite constraints, and run it over the given footprint.

    :param footprint: path to a GeoJSON footprint file.
    :param api: authenticated SentinelAPI instance.
    :param satnum: 'S1A' or 'S1B'; selects filename prefix and orbit direction.
    :param tile_num: tile label, used for log output only.
    :return: dict of products returned by api.query().
    """
    footprint_rd = geojson_to_wkt(read_geojson(footprint))
    raw_query = ''
    if df.file_name is not None:
        raw_query = raw_query + 'filename:%s AND ' % df.file_name
    if df.product_type is not None:
        raw_query = raw_query + 'producttype:%s AND ' % df.product_type
    if df.platform_name is not None:
        raw_query = raw_query + 'platformname:%s AND ' % df.platform_name
    # if df.orbit_direction is not None:
    #     raw_query = raw_query + 'orbitdirection:%s AND ' % df.orbit_direction
    if df.polarisation_mode is not None:
        raw_query = raw_query + 'polarisationmode:%s AND ' % df.polarisation_mode
    if df.cloud_cover_percentage is not None:
        raw_query = raw_query + 'cloudcoverpercentage:%s AND ' % df.cloud_cover_percentage
    if df.sensor_operational_mode is not None:
        raw_query = raw_query + 'sensoroperationalmode:%s AND ' % df.sensor_operational_mode
    if satnum == 'S1A':
        raw_query = raw_query + 'filename:S1A* AND '
        raw_query = raw_query + 'orbitdirection:Descending AND '
    elif satnum == 'S1B':
        raw_query = raw_query + 'filename:S1B* AND '
        raw_query = raw_query + 'orbitdirection:Ascending AND '
    # strip the trailing ' AND '
    raw_query = raw_query[:-5]
    # search by polygon, time, and SciHub query keywords
    products = api.query(footprint_rd,
                         date=(df.start_date, df.end_date),
                         raw=raw_query)
    # print results from the search
    if df.printProducts:
        # BUG FIX: the original used a Python 2 `print` statement here
        # (SyntaxError in Python 3; the loop below already used print()).
        print("%d products found for " % len(products) + tile_num)
        for product in products:
            print(products[product]['filename'])
    return products
def query_copernicus_hub(aoi=None, username='******', password='******',
                         hub='https://s5phub.copernicus.eu/dhus', **kwargs):
    """ Query Copernicus Open access Hub.

    :param aoi: (str) Geojson Area of interest url
    :param username: (str) Username to use for API connection
    :param password: (str) Password to use for API connection
    :param hub: (str) Url of hub to query
    :param kwargs: (dict) extra keywords for the api.query function (see
        https://sentinelsat.readthedocs.io/en/stable/cli.html#sentinelsat)
    :return: (SentinelAPI, dict) API object and results of query
    """
    # connect to the API
    api = SentinelAPI(username, password, hub)

    # query database, with or without a footprint constraint
    if aoi is None:
        products = api.query(**kwargs)
    else:
        # convert .geojson file
        footprint = geojson_to_wkt(read_geojson(aoi))
        products = api.query(footprint, **kwargs)

    # display results
    total_mb = sum(float(products[uuid]['size'][:-3]) for uuid in products)
    print(('Number of products found: {number_product}\n'
           'Total products size: {size:.2f} MB\n').format(
               number_product=len(products), size=total_mb))

    return api, products
def sen2_json_query(geojson_path, cloud, start_date, end_date, conf):
    """Query SciHub for Sentinel-2 products over a GeoJSON footprint.

    Parameters
    ----------
    geojson_path : path to the footprint GeoJSON file
    cloud : maximum cloud cover percentage
    start_date : start of the search window
    end_date : end of the search window
    conf : configuration dict providing conf["sen2"]["user"] / ["pass"]

    Returns
    -------
    A dicitonary of products
    """
    credentials = conf["sen2"]
    hub = SentinelAPI(credentials["user"], credentials["pass"],
                      'https://scihub.copernicus.eu/dhus')
    aoi = geojson_to_wkt(read_geojson(geojson_path))
    return hub.query(aoi,
                     platformname='Sentinel-2',
                     cloudcoverpercentage=(0, cloud),
                     date=(start_date, end_date))
# CLI option: footprint GeoJSON file (defaults to map.geojson).
parser.add_argument('--geojson', type=str, default="map.geojson", metavar='N',
                    help="footprint")
args = parser.parse_args()

print("loading id...")
# Credentials and the date range come from the JSON file passed via --data.
data = json.load(open(args.data))
startdate = date(data["startdate"][0], data["startdate"][1], data["startdate"][2])
enddate = date(data["enddate"][0], data["enddate"][1], data["enddate"][2])

print("connecting to sentinel API...")
api = SentinelAPI(data["login"], data["password"],
                  'https://scihub.copernicus.eu/dhus')

# search by polygon, time, and SciHub query keywords
print("searching...")
footprint = geojson_to_wkt(read_geojson(args.geojson))
# Sentinel-1 searches are restricted to GRD products; Sentinel-2 is unfiltered.
if args.sentinel == 1:
    products = api.query(footprint,
                         date=(startdate, enddate),
                         platformname='Sentinel-1',
                         producttype="GRD"
                         )
elif args.sentinel == 2:
    products = api.query(footprint,
                         date=(startdate, enddate),
                         platformname='Sentinel-2'
                         )

print(" product number: ", len(products))
# download all results from the search
print("downloading...")
api.download_all(products)
def search(user, password, geojson, start, end, download, md5, sentinel,
           producttype, instrument, sentinel1, sentinel2, cloud, footprints,
           path, query, url):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """
    api = SentinelAPI(user, password, url)

    # Build the search keywords from the mutually-constraining CLI flags.
    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs.update({"platformname": "Sentinel-" + sentinel})

    if instrument and not producttype:
        search_kwargs.update({"instrumentshortname": instrument})

    if producttype:
        search_kwargs.update({"producttype": producttype})

    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError(
                'Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs.update({"cloudcoverpercentage": "[0 TO %s]" % cloud})

    # DEPRECATED: to be removed with next major release
    elif sentinel2:
        search_kwargs.update({"platformname": "Sentinel-2"})
        logger.info('DEPRECATED: Please use --sentinel instead')

    # DEPRECATED: to be removed with next major release
    elif sentinel1:
        search_kwargs.update({"platformname": "Sentinel-1"})
        logger.info('DEPRECATED: Please use --sentinel instead')

    # Free-form extra query terms: "key1=val1,key2=val2".
    if query is not None:
        search_kwargs.update(dict([i.split('=') for i in query.split(',')]))

    wkt = geojson_to_wkt(read_geojson(geojson))
    products = api.query(wkt, start, end, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"),
                  "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path,
                                                           checksum=md5)
        # Record checksum failures so the user can retry them.
        if md5 is True:
            if len(failed_downloads) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"),
                          "w") as outfile:
                    for failed_id in failed_downloads:
                        outfile.write(
                            "%s : %s\n" %
                            (failed_id, products[failed_id]['title']))
    else:
        # Search-only mode: log a summary instead of downloading.
        for product_id, props in products.items():
            logger.info('Product %s - %s' % (product_id, props['summary']))
        logger.info('---')
        logger.info(
            '%s scenes found with a total size of %.2f GB' %
            (len(products), api.get_products_size(products)))
def search(
        user, password, geojson, start, end, download, md5, sentinel,
        producttype, instrument, sentinel1, sentinel2, cloud, footprints,
        path, query, url):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """
    api = SentinelAPI(user, password, url)

    # Build the search keywords from the mutually-constraining CLI flags.
    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs.update({"platformname": "Sentinel-" + sentinel})

    if instrument and not producttype:
        search_kwargs.update({"instrumentshortname": instrument})

    if producttype:
        search_kwargs.update({"producttype": producttype})

    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs.update({"cloudcoverpercentage": "[0 TO %s]" % cloud})

    # DEPRECATED: to be removed with next major release
    elif sentinel2:
        search_kwargs.update({"platformname": "Sentinel-2"})
        logger.info('DEPRECATED: Please use --sentinel instead')

    # DEPRECATED: to be removed with next major release
    elif sentinel1:
        search_kwargs.update({"platformname": "Sentinel-1"})
        logger.info('DEPRECATED: Please use --sentinel instead')

    # Free-form extra query terms: "key1=val1,key2=val2".
    if query is not None:
        search_kwargs.update(dict([i.split('=') for i in query.split(',')]))

    wkt = geojson_to_wkt(read_geojson(geojson))
    products = api.query(wkt, start, end, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path,
                                                           checksum=md5)
        # Record checksum failures so the user can retry them.
        if md5 is True:
            if len(failed_downloads) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for failed_id in failed_downloads:
                        outfile.write("%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        # Search-only mode: log a summary instead of downloading.
        for product_id, props in products.items():
            logger.info('Product %s - %s' % (product_id, props['summary']))
        logger.info('---')
        logger.info(
            '%s scenes found with a total size of %.2f GB' %
            (len(products), api.get_products_size(products)))
# End of the query window.
fecha_hasta = date(2019, 1, 1)

# Repo-relative paths: the AOI GeoJSON lives under data/.
root_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')
aoi_path = os.path.join(root_path, 'data', 'aoi_4326.geojson')
# NOTE(review): `fecha_desde` is defined earlier in this script.
fechas = (fecha_desde, fecha_hasta)

# connect to the API
api = SentinelAPI(os.getenv("USUARIO"), os.getenv("PASSWORD"),
                  'https://scihub.copernicus.eu/dhus')

# download single scene by known product id
#api.download(<product_id>)

# search by polygon, time, and Hub query keywords
footprint = geojson_to_wkt(read_geojson(aoi_path))
products = api.query(footprint,
                     date=fechas,
                     platformname='Sentinel-2',
                     cloudcoverpercentage=(0, 100))
print(products)

# download all results from the search
result = api.download_all(products)
print(result)

# GeoJSON FeatureCollection containing footprints and metadata of the scenes
#api.to_geojson(products)

# GeoPandas GeoDataFrame with the metadata of the scenes and the footprints as geometries
def find_sentinel_images(area_of_interest,
                         date_start,
                         date_end,
                         platform_name,
                         user,
                         password,
                         datastore_base_path,
                         download_path,
                         hub_address="https://scihub.copernicus.eu/apihub",
                         area_relation="Intersects",
                         limit_to_tiles=[],
                         other_search_keywords={},
                         limit_to_scenes=[],
                         download=True,
                         silent=False):
    """Search the hub for scenes over an AOI, then resolve each scene to a
    path: first in the local eodata store, otherwise by downloading it.

    Returns the product dicts when ``download`` is False, else a list of
    local product paths.

    NOTE(review): the mutable defaults ([] / {}) are shared across calls —
    safe only while callers never mutate them.
    """
    def sprint(string):
        # print() unless the caller asked for silence
        if not silent:
            print(string)
    ###################################
    identifiers = []
    products = {}
    product_paths = []
    sprint("Searching for scenes on "+hub_address)
    sprint(date_start+" - "+date_end)
    # search by polygon, time, and Hub query keywords
    # Build the filename pattern from tile and scene restrictions.
    file_name = []
    if limit_to_tiles:
        file_name = ["*_" + limit_to_tiles[i] + "_*"
                     for i in range(len(limit_to_tiles))]
    file_name = file_name + limit_to_scenes
    if len(file_name) == 0:
        file_name = "*"
    elif len(file_name) == 1:
        file_name = file_name[0]
    else:
        file_name = " OR ".join(file_name)
        file_name = "(" + file_name + ")"
    footprint = geojson_to_wkt(read_geojson(area_of_interest))
    products = _search_on_hub(user,
                              password,
                              hub_address,
                              area=footprint,
                              area_relation=area_relation,
                              date=(date_start, date_end),
                              platformname=platform_name,
                              filename=file_name,
                              **other_search_keywords)
    products = _remove_duplicate_acquisitions(products)
    sprint("Found %i scenes" % len(products.keys()))
    for k in products.keys():
        identifiers.append(products[k]["identifier"])
        sprint(products[k]["identifier"])
    if not download:
        return list(products.values())
    ##################################
    # Then locate them in the IPT eodata store
    sprint("Locating scenes in eodata store...")
    for i, identifier in enumerate(identifiers):
        path = _search_on_datastore(datastore_base_path, identifier)
        # If they are not in the IPT eodata store (some S3 images are missing)
        # then download them and store in the download directory in case they
        # haven't been downloaded yet.
        if not path:
            if products:
                product = products[list(products.keys())[i]]
            else:
                product = _search_on_hub(user, password, hub_address,
                                         filename=identifier)
            if not product:
                print("Product " + identifier +
                      " does not exist and will not be downloaded!")
                continue
            sprint("Scene not found in eodata store, downloading from "+hub_address+"...")
            path = _download_from_hub(product, download_path, user, password,
                                      hub_address, False)
            if not path:
                sprint("Could not download...")
                continue
        sprint(path)
        product_paths.append(path)
    return product_paths
""" import os import geopandas as gpd from sentinelsat.sentinel import SentinelAPI, read_geojson, geojson_to_wkt #Defining the Area of interest (must be GeoJson) workspace = 'C:/...../workspace' AOI= os.path.join (workspace, "AOI.geojson") #Setting connection parameters to the Sentinel Hub server user = '******' ## change this! password = '******' ## change this! api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus') #Setting the search footprint using a Geojson file footprint = geojson_to_wkt(read_geojson(AOI)) #Setting the search query parameters products = api.query(footprint, date = ('20200101', '20200331'), platformname = 'Sentinel-2', processinglevel = 'Level-2A', cloudcoverpercentage = (0, 20)) #Printing the number of products found print("The number of products found is: {} " .format (len(products))) #Creating a table with all the product search results products_table = api.to_geodataframe(products) #This part downloads the product(s) in the same folder where your code is located
import numpy as np
import zipfile
import sys
import os
import getpass

# path to directory where downloaded L2A scenes are stored
L2Ascenes_dir = sys.argv[1]

# request user credentials for Sentinel Download
# NOTE(review): the original credential/API lines were mangled (censored to
# '******', which is not valid Python); reconstructed here as a plain prompt
# plus a getpass password and the usual SentinelAPI login — confirm against
# the project history.
user = input('Enter your Copernicus Open Access Hub Username: ')
password = getpass.getpass('Password: ')
api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus')

# path to area of interest defined by user on the command line
# Must be geojson in WGS84
aoi = sys.argv[2]
footprint = geojson_to_wkt(read_geojson(aoi))  # define aoi for search of scenes

ro = 65  # set relative orbit number

# list dates of L2A data
dates = [
    x for x in os.listdir(L2Ascenes_dir)
    if x.startswith("2020") or x.startswith("2019")
]
print('L1C Scenes for the following date will be searched and downloaded:')
print(dates)

# path to folder where downloaded scenes will be stored defined by user on the command line
out_dir = sys.argv[3]

# iterate over dates and for every date search products, check if there are two available and if they are in
# relative orbit 65, if so download them
dto=date_to.strftime("%Y%m%d")) proc_scene_dir = os.path.join(PROC_DIR, period_s) tci_path = os.path.join(proc_scene_dir, 'tci.tif') if os.path.exists(tci_path): logger.info("TCI file already generated at %s", tci_path) return tci_path if not settings.SCIHUB_USER or not settings.SCIHUB_PASS: raise "SCIHUB_USER and/or SCIHUB_PASS are not set. " + \ "Please read the Configuration section on README." api = SentinelAPI(settings.SCIHUB_USER, settings.SCIHUB_PASS, settings.SCIHUB_URL) extent = read_geojson(EXTENT_PATH) footprint = geojson_to_wkt(extent) logger.info( "Query S2MSI2A products with up to %d%% cloud cover from %s to %s", MAX_CLOUD_PERC, date_from, date_to) products = api.query(footprint, date=(date_from, date_to), platformname='Sentinel-2', cloudcoverpercentage=(0, MAX_CLOUD_PERC), producttype='S2MSI2A') logger.info("Found %d products", len(products)) raw_dir = os.path.join(RAW_DIR, period_s) os.makedirs(raw_dir, exist_ok=True) # Filter already downloaded products
# Geometry of the first shapefile feature (feat comes from earlier in the script).
geom = feat.GetGeometryRef()
print('Geometry of feature 1:', geom)

###############################################
# convert the shapefile to geojson
###############################################
gjfile = shapefile.split(".")[0] + ".geojson"
com = "ogr2ogr -f GeoJSON -t_srs crs:84 " + gjfile + " " + shapefile
flag = os.system(com)
# NOTE(review): the failure message below contains a typo ("shaoefile");
# it is a runtime string, so it is left untouched here.
if flag == 0:
    print('Shapefile converted to Geojson format: ' + gjfile)
else:
    print('Error converting shaoefile to Geojson')

# convert the geojson to wkt for the API search
footprint = geojson_to_wkt(read_geojson(gjfile))

# old code to open a geojson file directly
# with open(geojsonfile) as f:
#     polydata = gj.load(f)

###############################################
# search the ESA Sentinel data hub
###############################################
# set query parameters
query_kwargs = {
    'area': footprint,
    'platformname': 'Sentinel-1',
    'producttype': 'GRD',
    # orbitdirection='ASCENDING'),
def cli(user, password, geometry, start, end, uuid, name, download, sentinel,
        producttype, instrument, cloud, footprints, path, query, url,
        order_by, limit):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your Copernicus Open Access Hub user and password, you must pass a
    geojson file containing the geometry of the area you want to search for
    or the UUIDs of the products. If you don't specify the start and end
    dates, it will search in the last 24 hours.
    """
    _set_logger_handler()
    api = SentinelAPI(user, password, url)

    # Build the search keywords from the mutually-constraining CLI flags.
    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel
    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument
    if producttype:
        search_kwargs["producttype"] = producttype
    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs["cloudcoverpercentage"] = (0, cloud)
    # Free-form extra query terms: "key1=val1,key2=val2".
    if query is not None:
        search_kwargs.update((x.split('=') for x in query.split(',')))
    if geometry is not None:
        search_kwargs['area'] = geojson_to_wkt(read_geojson(geometry))

    # Three lookup modes: explicit UUID list, identifier name, or full search.
    if uuid is not None:
        uuid_list = [x.strip() for x in uuid.split(',')]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if 'Invalid key' in e.msg:
                    logger.error('No product with ID \'%s\' exists on server',
                                 productid)
    elif name is not None:
        search_kwargs["identifier"] = name
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end), order_by=order_by,
                             limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"),
                  "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path)
        # Record failed downloads so the user can retry them.
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"),
                      "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write("%s : %s\n" %
                                  (failed_id, products[failed_id]['title']))
    else:
        # Search-only mode: log a summary instead of downloading.
        for product_id, props in products.items():
            if uuid is None:
                logger.info('Product %s - %s', product_id, props['summary'])
            else:  # querying uuids has no summary key
                logger.info('Product %s - %s - %s MB', product_id,
                            props['title'],
                            round(int(props['size']) / (1024. * 1024.), 2))
        if uuid is None:
            logger.info('---')
            logger.info('%s scenes found with a total size of %.2f GB',
                        len(products), api.get_products_size(products))
def cli(
    user,
    password,
    geometry,
    start,
    end,
    uuid,
    name,
    download,
    sentinel,
    producttype,
    instrument,
    cloud,
    footprints,
    path,
    query,
    url,
    order_by,
    limit,
):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your Copernicus Open Access Hub user and password, you must pass a
    geojson file containing the geometry of the area you want to search for
    or the UUIDs of the products. If you don't specify the start and end dates,
    it will search in the last 24 hours.
    """
    _set_logger_handler()

    # Fall back to ~/.netrc credentials when --user/--password are omitted.
    if user is None or password is None:
        try:
            user, password = requests.utils.get_netrc_auth(url)
        except TypeError:
            # get_netrc_auth returned None (no matching machine entry).
            pass

    if user is None or password is None:
        raise click.UsageError(
            "Missing --user and --password. Please see docs "
            "for environment variables and .netrc support.")

    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    # producttype/instrument already imply a platform, so only add
    # platformname when neither of the more specific filters is given.
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel
    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument
    if producttype:
        search_kwargs["producttype"] = producttype

    if cloud:
        if sentinel not in ["2", "3"]:
            logger.error("Cloud cover is only supported for Sentinel 2 and 3.")
            exit(1)
        search_kwargs["cloudcoverpercentage"] = (0, cloud)

    if query is not None:
        # Split on the first '=' only, so query values that themselves
        # contain '=' do not blow up the dict update.
        search_kwargs.update((x.split("=", 1) for x in query))

    if geometry is not None:
        search_kwargs["area"] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        # Look up each product by its UUID instead of running a search.
        uuid_list = [x.strip() for x in uuid]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if "Invalid key" in e.msg:
                    logger.error("No product with ID '%s' exists on server", productid)
                    exit(1)
                else:
                    raise
    elif name is not None:
        # Multiple names are OR-ed together into a single identifier query.
        search_kwargs["identifier"] = name[0] if len(
            name) == 1 else "(" + " OR ".join(name) + ")"
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        # Default date window: everything up to now.
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end), order_by=order_by, limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        # NOTE(review): this branch only runs when `footprints` is the
        # boolean True, so os.path.isdir(footprints) can never name a real
        # directory — confirm whether a path-valued --footprints was intended.
        if os.path.isdir(footprints):
            foot_path = os.path.join(footprints, "search_footprints.geojson")
        else:
            foot_path = "search_footprints.geojson"
        if path == ".":
            dump_path = os.path.join(os.getcwd(), foot_path)
        else:
            dump_path = os.path.join(path, foot_path)
        with open(dump_path, "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, triggered, failed_downloads = api.download_all(
            products, path)
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write("%s : %s\n" % (failed_id, products[failed_id]["title"]))
    else:
        # Dry run: just report what was found.
        for product_id, props in products.items():
            if uuid is None:
                logger.info("Product %s - %s", product_id, props["summary"])
            else:
                # querying uuids has no summary key
                logger.info(
                    "Product %s - %s - %s MB",
                    product_id,
                    props["title"],
                    round(int(props["size"]) / (1024.0 * 1024.0), 2),
                )
    if uuid is None:
        logger.info("---")
        logger.info(
            "%s scenes found with a total size of %.2f GB",
            len(products),
            api.get_products_size(products),
        )
import os import requests import json from sentinelsat.sentinel import read_geojson, geojson_to_wkt from pandas.io.json import json_normalize #set wd os.chdir( 'D:/OneDrive/Documents/Work/CloudFerro/Projects/DIAS_CAP/DataSearch/WRLD_190814' ) # import json geom = geojson_to_wkt(read_geojson('north.geojson')) #set search parameters, change as desired #example S2 args = { 'collection': 'Sentinel2', 'product': 'LEVEL2A', 'startDate': '2019-01-01', 'completionDate': '2019-12-31', #'geometry': geom, 'status': '34' #'status': '31|32' #'status': 'all' } args = { 'collection': 'Sentinel3', 'product': 'LEVEL1',
def cli(user, password, geometry, start, end, uuid, name, download, md5,
        sentinel, producttype, instrument, cloud, footprints, path, query,
        url, order_by, limit):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your Copernicus Open Access Hub user and password, you must pass a
    geojson file containing the geometry of the area you want to search for
    or the UUIDs of the products. If you don't specify the start and end dates,
    it will search in the last 24 hours.
    """
    _set_logger_handler()
    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    # producttype/instrument already imply a platform, so only add
    # platformname when neither of the more specific filters is given.
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel
    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument
    if producttype:
        search_kwargs["producttype"] = producttype

    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs["cloudcoverpercentage"] = (0, cloud)

    if query is not None:
        # Split on the first '=' only, so query values that themselves
        # contain '=' do not blow up the dict update.
        search_kwargs.update((x.split('=', 1) for x in query.split(',')))

    if geometry is not None:
        search_kwargs['area'] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        # Look up each product by its UUID instead of running a search.
        uuid_list = [x.strip() for x in uuid.split(',')]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if 'Invalid key' in e.msg:
                    logger.error('No product with ID \'%s\' exists on server', productid)
                else:
                    # Do not silently swallow unrelated server errors.
                    raise
    elif name is not None:
        search_kwargs["identifier"] = name
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        # Default date window: everything up to now.
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end), order_by=order_by,
                             limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path, checksum=md5)
        # Corrupt-scene reporting only makes sense when checksums were verified.
        if md5 is True:
            if len(failed_downloads) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for failed_id in failed_downloads:
                        outfile.write("%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        # Dry run: just report what was found.
        for product_id, props in products.items():
            if uuid is None:
                logger.info('Product %s - %s', product_id, props['summary'])
            else:
                # querying uuids has no summary key
                logger.info('Product %s - %s - %s MB', product_id, props['title'],
                            round(int(props['size']) / (1024. * 1024.), 2))
    if uuid is None:
        logger.info('---')
        logger.info('%s scenes found with a total size of %.2f GB',
                    len(products), api.get_products_size(products))
def catalogue_job(directory, directory2, api):
    """Catalogue recent hub products for every scene footprint under `directory`.

    For each ``.geojson`` footprint found in the subdirectories of
    ``directory``, mirror a scene folder under ``directory2`` (routed by the
    S1A/S1B tag in the subdirectory name), query the hub for products over
    that footprint from the last month, and dump the result table to a
    timestamped CSV in the current working directory.

    Parameters:
        directory:  root folder whose subdirectories hold .geojson footprints
        directory2: root folder under which scene folders are created
        api:        a connected SentinelAPI instance

    Returns:
        The last scene directory processed, or None if no footprint was found.
    """
    scene_dir = None
    for subdir in listdir_nohidden(directory):
        for filename in os.listdir(subdir):
            if not filename.endswith(".geojson"):
                continue
            geojson_path = os.path.join(subdir, filename)
            subdir_min = subdir.replace(directory, '')
            filename_min = filename.replace('.geojson', '')

            # Route the scene folder by the platform tag in the directory name.
            if re.search('s1a', subdir_min, re.IGNORECASE):
                scene_dir = os.path.join(directory2, 'S1A', 'S1A_' + filename_min)
            elif re.search('s1b', subdir_min, re.IGNORECASE):
                scene_dir = os.path.join(directory2, 'S1B', 'S1B_' + filename_min)
            else:
                # The original fell through here with scene_dir unbound (or
                # stale from the previous iteration); skip unknown platforms.
                continue

            if not os.path.exists(scene_dir):
                os.makedirs(scene_dir)
                print(scene_dir + " folder created.")

            footprint = geojson_to_wkt(read_geojson(geojson_path))
            # NOTE(review): directory names suggest Sentinel-1 data, but the
            # query asks for Sentinel-2 — confirm which platform is intended.
            products = api.query(footprint,
                                 date=("NOW-1MONTH", "NOW"),
                                 platformname='Sentinel-2')
            products_df = api.to_dataframe(products)

            now_time = datetime.datetime.now()
            # NOTE(review): str(datetime) contains ':' (invalid in Windows
            # filenames) and the CSV is written to the CWD, not scene_dir —
            # the log message below is misleading; confirm intended location.
            products_df.to_csv(str(now_time) + '.csv')
            print(str(now_time) + '.csv created at ' + scene_dir)
    return scene_dir