def download_error_image(img_date, geo_img, img_id, username, password):
    """Re-query Copernicus SciHub for one Sentinel-2 product that failed a
    previous download (its details were logged to image_error.txt) and
    return an authenticated API handle plus the product UUID.

    Typical use, once per failed image (loop for several)::

        api, target_image_id = download_error_image(img_date, geo_img,
                                                    img_id, username, password)
        api.download(target_image_id, directory_path='.')

    If ``api.download`` reports the product is not online ("Triggering
    retrieval from long term archive"), see
    https://sentinelsat.readthedocs.io/en/stable/api.html#lta-products

    Parameters
    ----------
    img_date : datetime.date or datetime.datetime
        Sensing date of the failed image (from image_error.txt).
    geo_img : dict
        GeoJSON geometry of the image footprint (from image_error.txt).
    img_id : str
        Product title of the failed image (from image_error.txt).
    username, password : str
        Copernicus SciHub credentials.

    Returns
    -------
    tuple(SentinelAPI, str)
        The API object and the UUID of the matching product.

    Raises
    ------
    ValueError
        If no product with title ``img_id`` exists in the +/- 1 day window.
    """
    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')
    # Search a +/- 1 day window around the sensing date so midnight/timezone
    # boundary effects cannot make the query miss the product.
    day_before = img_date - datetime.timedelta(days=1)
    day_after = img_date + datetime.timedelta(days=1)
    footprint = geojson_to_wkt(geo_img)
    products = api.query(
        footprint,
        date=(day_before, day_after),
        platformname='Sentinel-2',
    )
    sat_df = api.to_geodataframe(products)
    result = sat_df.loc[sat_df['title'] == img_id]
    if result.empty:
        # Fail loudly instead of raising a bare IndexError on .values[0].
        raise ValueError("No Sentinel-2 product titled %r found between %s and %s"
                         % (img_id, day_before, day_after))
    return api, result.index.values[0]
def get_products(login_json, coordinates, date_start, date_end, download_dir):
    """Search SciHub for Sentinel-2 scenes over ``coordinates`` between
    ``date_start`` and ``date_end``, download every match into
    ``download_dir``, and return the result set as a GeoDataFrame
    (scene metadata with footprint geometries).

    Credentials are read from the JSON file at ``login_json``.
    """
    # NOTE(review): the first two values of the JSON object are taken as
    # username and password in insertion order — confirm the file schema.
    with open(login_json, 'r') as fp:
        LOGIN_INFO = json.load(fp)
    USER_NAME, PASSWORD = list(LOGIN_INFO.values())

    # connect to the API
    api = SentinelAPI(USER_NAME, PASSWORD, 'https://scihub.copernicus.eu/dhus')

    # Area of interest: GeoJSON polygon converted to a WKT footprint.
    footprint = geojson_to_wkt(Polygon(coordinates=coordinates))

    # Scenes that intersect the footprint within the date window,
    # limited to at most 40% cloud cover.
    products = api.query(
        footprint,
        date=(date_start, date_end),
        platformname='Sentinel-2',
        area_relation='Intersects',
        cloudcoverpercentage=(0, 40))

    # Fetch every product the query matched.
    api.download_all(products, directory_path=download_dir)

    return api.to_geodataframe(products)
def test_to_geopandas():
    """Smoke-test converting a query result into a GeoDataFrame."""
    api = SentinelAPI(**_api_auth)
    search_area = get_coordinates('tests/map.geojson')
    # NOTE(review): positional start/end dates use the older sentinelsat
    # query() signature — confirm against the pinned library version.
    products = api.query(
        search_area,
        "20151219",
        "20151228",
        platformname="Sentinel-2")
    gdf = api.to_geodataframe(products)
api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus') #Setting the search footprint using a Geojson file footprint = geojson_to_wkt(read_geojson(AOI)) #Setting the search query parameters products = api.query(footprint, date = ('20200101', '20200331'), platformname = 'Sentinel-2', processinglevel = 'Level-2A', cloudcoverpercentage = (0, 20)) #Printing the number of products found print("The number of products found is: {} " .format (len(products))) #Creating a table with all the product search results products_table = api.to_geodataframe(products) #This part downloads the product(s) in the same folder where your code is located ## OPTION 1: Download single product api.download('df132697-676e-43ce-b7bd-45211696119f') ## OPTION 2:Download all products download_list = [] for index, row in products_table.iterrows(): download_list.append (row['title']) print ("The following products will be downloaded: {}" . format (download_list)) api.download (row['uuid'])
def _process_product(title, safe_name, save_imgs, save_rgb, save_tiles,
                     unet_weights, unet_clouds, class_path, class_clouds,
                     poly_path, percentiles_forest, percentiles_clouds):
    """Run the per-product processing chain for one extracted .SAFE product:
    RGB tiling, UNet forest classification, UNet cloud classification,
    tile merging, and polygon evaluation.

    This used to be duplicated verbatim in both branches of the download
    check in ``download_images``; it is factored out so the two paths
    cannot drift apart.
    """
    path_to_folder = save_imgs + '/' + safe_name + '/GRANULE/'

    # Create RGB tiles (skipped if the tile directory already exists).
    dir_save_tiles = save_tiles + '/' + title
    if os.path.isdir(dir_save_tiles) is False:
        print('Creating RGB tiles')
        os.mkdir(dir_save_tiles)
        rgb_tiles(path_to_folder, save_rgb, dir_save_tiles, title)

    # UNet forest classification, then merge predicted tiles into one GeoTiff.
    save_class_path = class_path + '/' + title
    if os.path.isdir(save_class_path) is False:
        print('Applying UNet')
        os.mkdir(save_class_path)
        application(dir_save_tiles, unet_weights,
                    save_class_path, percentiles_forest, clouds=0)
        join_tiles(save_class_path, class_path, path_to_folder)
        print("Tiles merged!")

    # UNet clouds classification, then merge into one GeoTiff.
    save_class_clouds = class_clouds + '/' + title
    if os.path.isdir(save_class_clouds) is False:
        print('Applying UNet clouds')
        os.mkdir(save_class_clouds)
        application(dir_save_tiles, unet_clouds,
                    save_class_clouds, percentiles_clouds, clouds=1)
        join_tiles(save_class_clouds, class_clouds, path_to_folder)
        print("Clouds tiles merged!")

    # polygons evaluation
    print("Polygons evaluation")
    polygons(title, class_path, class_clouds, path_to_folder,
             save_class_path, save_imgs, poly_path, time_spaced=None)
    return save_class_path


def download_images(save_imgs, save_rgb, save_tiles, unet_weights, unet_clouds,
                    class_path, class_clouds, poly_path, percentiles_forest,
                    percentiles_clouds, boundsdata):
    """Download the last 30 days of Sentinel-2 Level-2A scenes covering the
    AOI in ``boundsdata`` and run the full processing chain on each product
    (RGB tiles -> UNet forest mask -> UNet cloud mask -> polygon evaluation).

    Parameters
    ----------
    save_imgs : str
        Directory that receives the raw .SAFE products.
    save_rgb, save_tiles : str
        Output directories for RGB composites and RGB tiles.
    unet_weights, unet_clouds : str
        UNet weight files for the forest and cloud models.
    class_path, class_clouds : str
        Output directories for forest / cloud classification results.
    poly_path : str
        Output directory for the polygon evaluation.
    percentiles_forest, percentiles_clouds
        Normalization percentiles passed through to ``application``.
    boundsdata : str
        Path to a GeoJSON file with the AOI polygon.
    """
    # connect to the API
    # SECURITY NOTE: credentials are hard-coded placeholders; supply real
    # ones through configuration rather than editing the source.
    user = '******'
    password = '******'
    api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus')

    # search by polygon
    footprint = geojson_to_wkt(read_geojson(boundsdata))

    # search for the images: L2A products from the last 30 days inside the AOI
    products = api.query(
        footprint,
        date=(["NOW-30DAYS", "NOW"]),
        area_relation='IsWithin',
        platformname='Sentinel-2',
        processinglevel='Level-2A',
        #cloudcoverpercentage = (0, 20)
    )
    print(len(products))

    table_names = api.to_geodataframe(products)
    uuid_names = table_names['uuid']           # product UUIDs to download
    name_zip = table_names['title']            # product titles (zip base names)
    extracted_name = table_names['filename']   # names of the .SAFE folders

    # download and process each product; k indexes the parallel metadata columns
    for k, fname in enumerate(uuid_names):
        file_dir = save_imgs + '/' + extracted_name[k]
        if os.path.isdir(file_dir) is False:
            # Not on disk yet: download the zip into save_imgs and unpack it.
            retval = os.getcwd()
            os.chdir(save_imgs)
            print("Downloading data...")
            api.get_product_odata(fname)
            api.download(fname)
            os.chdir(retval)  # return to previous directory

            # Wait for the zip to appear on disk before extracting.
            # NOTE(review): unbounded busy-wait — consider a timeout.
            path_zip_name = save_imgs + '/' + name_zip[k] + '.zip'
            while not os.path.exists(path_zip_name):
                time.sleep(1)
            if not os.path.isfile(path_zip_name):
                raise ValueError("%s isn't a file!" % path_zip_name)

            # extract files, then remove the archive to save disk space
            with zipfile.ZipFile(path_zip_name, 'r') as zip_ref:
                zip_ref.extractall(save_imgs)
            os.remove(path_zip_name)
            print("%s has been removed successfully" % name_zip[k])

        # Same chain whether the product was just downloaded or already present.
        _process_product(name_zip[k], extracted_name[k], save_imgs, save_rgb,
                         save_tiles, unet_weights, unet_clouds, class_path,
                         class_clouds, poly_path, percentiles_forest,
                         percentiles_clouds)
    return
'31TCG', '30TYN', '30TYM', '30TYL', '30TYK', '30TXN', '30TXM', '30TXL', '30TXK', '30TWM', '30TWL' ] #first search l2a data gdfl2 = gpd.GeoDataFrame() product = ['S2MSI2A', 'S2MSI2AP'] for i in tile: for j in product: productstmp = api.query(filename='*%s*' % i, beginPosition=('20180101', '20181231'), producttype='%s' % j, platformname='Sentinel-2') gdftmp = api.to_geodataframe(productstmp) gdfl2 = pd.concat([gdfl2, gdftmp], sort=True) gdfl2.to_csv('SPN_l2a_18.csv') #then search l1c data gdfl1c = gpd.GeoDataFrame() for i in tile: productstmp = api.query(filename='*%s*' % i, beginPosition=('20160101', '20171231'), producttype='S2MSI1C', platformname='Sentinel-2') gdftmp = api.to_geodataframe(productstmp)