def run_download(product_id, out_dir, username, password):
    """Download one product and drop status marker files in ``out_dir``.

    Parameters
    ----------
    product_id : mapping with a ``'uuid'`` key identifying the product.
    out_dir : output directory; paths are built by plain concatenation, so
        it is expected to end with a path separator — TODO confirm callers.
    username, password : SciHub credentials.

    Returns
    -------
    int
        0 on download/connection error or when the product was online but
        not downloaded, 1 when the zip landed on disk, otherwise the HTTP
        status code returned by the offline-retrieval trigger.
    """
    url = 'https://scihub.copernicus.eu/dhus'
    d_api = SentinelAPI(username, password, url)
    # FIX: previously `retrievel_code` was never assigned on the
    # "online but not downloaded" branch, so the final `return` raised
    # UnboundLocalError. Initialise it up front.
    retrievel_code = 0
    try:
        download_info = d_api.download(product_id['uuid'], directory_path=out_dir)
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt. Keep the original best-effort behaviour.
        print('Server connection error')
        return 0
    if os.path.exists(out_dir + download_info['title'] + '.zip'):
        os.mknod(out_dir + download_info['title'] + '_data_downloaded.ok')
        print('data_downloaded')
        retrievel_code = 1
    elif download_info['Online']:
        print('online_but_not_downloaded')
    elif not download_info['Online']:
        # Trigger retrieval from the Long Term Archive; status codes:
        # https://scihub.copernicus.eu/userguide/LongTermArchive#HTTP_Status_codes
        retrievel_code = d_api._trigger_offline_retrieval(download_info['url'])
        if retrievel_code == 202:
            print("offline product retrieval accepted")
        elif retrievel_code == 403:
            print("offline product requests exceed quota")
        elif retrievel_code == 503:
            print("offline product retrieval not accepted")
    return retrievel_code
def test_small_query():
    """A small query must produce the expected Solr string and a 200."""
    api = SentinelAPI(**_api_kwargs)
    api.query(**_small_query)
    expected = (
        '(beginPosition:[2015-01-01T00:00:00Z TO 2015-01-02T00:00:00Z]) '
        'AND (footprint:"Intersects(POLYGON((0 0,1 1,0 1,0 0)))")')
    assert api.last_query == expected
    assert api.last_status_code == 200
def Sen2Download(self, dprofile):
    """Query SciHub for recent Sentinel-2 L1C products over the profile's
    shapefile AOI and hand the hits to ``DownloadProducts``.

    Parameters
    ----------
    dprofile : download profile carrying username, password, daysdiff,
        shape_file_path, download_dir and concurrency attributes.
    """
    username = dprofile.username
    password = dprofile.password
    daysdiff = dprofile.daysdiff
    shape_file = dprofile.shape_file_path
    directory_path = dprofile.download_dir
    concurrency = dprofile.concurrency  # kept for parity with the original unpacking
    # FIX: the log line previously said "Sentinel-1" although this method
    # queries and downloads Sentinel-2 products.
    logger.info(
        f'Sentinel-2 Downloads starting with dprofile = {dprofile}')
    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')
    # Shapefile (GeoJSON) -> WKT footprint for the query.
    footprint = geojson_to_wkt(read_geojson(shape_file))
    # Search window: the last `daysdiff` days up to now.
    end_date = datetime.datetime.now()
    start_date = end_date - datetime.timedelta(days=daysdiff)
    products = api.query(footprint,
                         date=(start_date, end_date),
                         platformname='Sentinel-2',
                         producttype='S2MSI1C',
                         cloudcoverpercentage=(0, 30))
    # NOTE(review): `self` is passed explicitly to a bound method here, so
    # DownloadProducts receives it twice — looks like a bug, but its
    # signature is not visible from this file; confirm before changing.
    self.DownloadProducts(self, products, dprofile)
def downloadProduct(data):
    """Download one product and track its lifecycle in the Downloads table.

    Parameters
    ----------
    data : tuple of (product uuid, target directory, username, password).
    """
    index, directory_path, username, password = data
    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')
    # Mark the row as started, then re-fetch it for subsequent updates.
    Downloads.objects.filter(product_id=index).update(
        start_time=datetime.datetime.now(), status='started')
    download_data = Downloads.objects.get(product_id=index)
    logger.info(f'Downloading product with product_id {index} Started')
    try:
        api.download(index, directory_path=directory_path, checksum=True)
        download_data.end_time = datetime.datetime.now()
        download_data.status = 'finished'
        download_data.save()
    except Exception as e:
        # FIX: the original had a second `except AttributeError` clause after
        # this one; AttributeError subclasses Exception, so that handler was
        # unreachable and merely duplicated this body. It has been removed.
        logger.exception(
            f'Exception occured {e} while downloading product with product_id {index}'
        )
        download_data.end_time = datetime.datetime.now()
        download_data.status = 'error'
        download_data.save()
    logger.info(f'Downloading product with product_id {index} Finished')
def __init__(self, username, password, archive, backup_archive=None):
    """Connect API.

    Raise ProcessorFailedError on failure
    """
    from sentinelsat.sentinel import SentinelAPI, SentinelAPIError

    # remember settings for query()
    self.archive = archive
    self.backup_archive = backup_archive

    # connect API
    try:
        self.api = SentinelAPI(username, password, archive)
    except (SentinelAPIError, ConnectionError) as e:
        self.api = None
        # FIX: Python 3 deletes the name bound by `except ... as e` when the
        # handler exits, so after the nested handler below ran, the original
        # `format(e)` raised NameError. Keep the error in a separate name.
        last_error = e
        if backup_archive:
            # re-try with backup archive
            Logger.error(
                "Unable to connect {} ({}). Re-trying with {}...".format(
                    archive, e, backup_archive))
            try:
                self.api = SentinelAPI(username, password, backup_archive)
            except (SentinelAPIError, ConnectionError) as backup_error:
                self.api = None
                last_error = backup_error
        if self.api is None:
            raise ProcessorFailedError(
                self,
                "Unable to connect: {}".format(last_error),
                set_status=False)

    Logger.debug("Sentinel API connected")
def test_to_geopandas():
    """Query results should convert to a GeoDataFrame without error."""
    api = SentinelAPI(**_api_auth)
    found = api.query(get_coordinates('tests/map.geojson'),
                      "20151219", "20151228",
                      platformname="Sentinel-2")
    gdf = api.to_geodataframe(found)
def __init__(self, location):
    """Build the watcher state for *location*: options, DB, API client."""
    opts = Options(location)
    self.options = opts
    self.sql = Esa_Sql(opts)
    self.api = SentinelAPI(opts.user, opts.password)
    self.products = []
    check_and_clean_log_file()
def download_error_image(img_date, geo_img, img_id, username, password):
    '''Re-locate a Sentinel-2 product that previously failed to download.

    After reading the error file (image_error.txt) produced by the COG
    Sentinel-2 workflow, feed its img_date, geo_img and img_id entries to
    this function (use a for loop when you have more than one image).

    Example::

        api, target_image_id = download_error_image(img_date, geo_img,
                                                    img_id, username, password)
        api.download(target_image_id, directory_path='.')

    If the product is offline ("Product ... is not online. Triggering
    retrieval from long term archive."), see
    https://sentinelsat.readthedocs.io/en/stable/api.html#lta-products

    username and password should be strings.

    Returns (api, product_index): the connected client and the index of the
    first query hit whose title equals img_id.
    '''
    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')
    # Search one day either side of the acquisition date.
    day_before = img_date - datetime.timedelta(days=1)
    day_after = img_date + datetime.timedelta(days=1)
    footprint = geojson_to_wkt(geo_img)
    products = api.query(footprint,
                         date=(day_before, day_after),
                         platformname='Sentinel-2')
    sat_df = api.to_geodataframe(products)
    result = sat_df.loc[sat_df['title'] == img_id]
    # NOTE(review): raises IndexError when img_id is not among the hits.
    return api, result.index.values[0]
def test_get_product_info():
    """get_product_info must return the full metadata dict for S1 and S2 ids."""
    api = SentinelAPI(**_api_auth)
    expected_s1 = {
        'id': '8df46c9e-a20c-43db-a19a-4240c2ed3b8b',
        'size': 143549851,
        'md5': 'D5E4DF5C38C6E97BF7E7BD540AB21C05',
        'url': "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/$value",
        'date': '2015-11-21T10:03:56Z',
        'footprint': '-63.852531 -5.880887,-67.495872 -5.075419,-67.066071 -3.084356,-63.430576 -3.880541,'
                     '-63.852531 -5.880887',
        'title': 'S1A_EW_GRDM_1SDV_20151121T100356_20151121T100429_008701_00C622_A0EC',
    }
    expected_s2 = {
        'date': '2015-12-27T14:22:29Z',
        'footprint': '-58.80274769505742 -4.565257232533263,-58.80535376268811 -5.513960396525286,'
                     '-57.90315169909761 -5.515947033626909,-57.903151791669515 -5.516014389089381,-57.85874693129081 -5.516044812342758,'
                     '-57.814323596961835 -5.516142631941845,-57.81432351345917 -5.516075248310466,-57.00018056571297 -5.516633044843839,'
                     '-57.000180565731384 -5.516700066819259,-56.95603179187787 -5.51666329264377,-56.91188395837315 -5.516693539799448,'
                     '-56.91188396736038 -5.51662651925904,-56.097209386295305 -5.515947927683427,-56.09720929423562 -5.516014937246069,'
                     '-56.053056977999596 -5.5159111504805916,-56.00892491028779 -5.515874390220655,-56.00892501130261 -5.515807411549814,'
                     '-55.10621586418906 -5.513685455771881,-55.108821882251775 -4.6092845892233,-54.20840287327946 -4.606372862374043,'
                     '-54.21169990975238 -3.658594390979672,-54.214267703869346 -2.710949551849636,-55.15704255065496 -2.7127451087194463,'
                     '-56.0563616875051 -2.71378646425769,-56.9561852630143 -2.7141556791285275,-57.8999998009875 -2.713837142510183,'
                     '-57.90079161941062 -3.6180222056692726,-58.800616247288836 -3.616721351843382,-58.80274769505742 -4.565257232533263',
        'id': '44517f66-9845-4792-a988-b5ae6e81fd3e',
        'md5': '48C5648C2644CE07207B3C943DEDEB44',
        'size': 5854429622,
        'title': 'S2A_OPER_PRD_MSIL1C_PDMC_20151228T112523_R110_V20151227T142229_20151227T142229',
        'url': "https://scihub.copernicus.eu/apihub/odata/v1/Products('44517f66-9845-4792-a988-b5ae6e81fd3e')/$value",
    }
    assert api.get_product_info('8df46c9e-a20c-43db-a19a-4240c2ed3b8b') == expected_s1
    assert api.get_product_info('44517f66-9845-4792-a988-b5ae6e81fd3e') == expected_s2
def download_scene(scene):
    """Download a scene and change its status
    """
    from sentinelsat.sentinel import SentinelAPI
    from django.conf import settings

    target = join(settings.MEDIA_ROOT, scene.sat, scene.identifier)
    check_create_folder(target)
    # Fall back to the default hub URL when SENTINEL_API_URL is unset.
    try:
        api = SentinelAPI(settings.SENTINEL_USER,
                          settings.SENTINEL_PASSWORD,
                          settings.SENTINEL_API_URL)
    except AttributeError:
        api = SentinelAPI(settings.SENTINEL_USER, settings.SENTINEL_PASSWORD)
    try:
        print('Changing status of scene %s to downloading' % scene.identifier)
        scene.change_status('downloading')
        print('Starting download of product %s on path %s' % (scene.product, target))
        api.download(scene.product, target)
        print('Changing status of scene %s to downloaded' % scene.identifier)
        scene.change_status('downloaded')
    except Exception as exp:
        # Any failure flips the scene to dl_failed rather than propagating.
        print('Unexpected error: %s' % exp)
        print('Changing status of scene %s to dl_failed' % scene.identifier)
        scene.change_status('dl_failed')
def run_download_from_pool(product_id, out_dir, username, password):
    """Worker-pool variant of run_download: fetch one product by uuid.

    Writes ``<title>.ok`` next to the zip on success, otherwise prints a
    status line (and triggers LTA retrieval for offline products).

    Returns
    -------
    The product-info dict from sentinelsat, or 0 when the download call
    itself failed.
    """
    url = 'https://scihub.copernicus.eu/dhus'
    d_api = SentinelAPI(username, password, url)
    try:
        download_info = d_api.download(product_id, directory_path=out_dir)
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt — important for pool workers.
        print('Server connection error')
        return 0
    if os.path.exists(out_dir + '/' + download_info['title'] + '.zip'):
        os.mknod(out_dir + '/' + download_info['title'] + '.ok')
        print('data_downloaded')
    elif download_info['Online']:
        print('online_but_not_downloaded')
    elif not download_info['Online']:
        # Trigger LTA retrieval; status codes documented at
        # https://scihub.copernicus.eu/userguide/LongTermArchive#HTTP_Status_codes
        retrievel_code = d_api._trigger_offline_retrieval(download_info['url'])
        if retrievel_code == 202:
            print("offline product retrieval accepted")
        elif retrievel_code == 403:
            print("offline product requests exceed quota")
        elif retrievel_code == 503:
            print("offline product retrieval not accepted")
    return download_info
def retrieve_mata_data(filename, username, password):
    """Look up the SciHub metadata row matching a Sentinel-1 product name.

    Parameters
    ----------
    filename : underscore-separated Sentinel-1 product name (mode at field 1,
        product type at field 2, start date at field 5, orbit at field 7).
    username, password : SciHub credentials.

    Returns
    -------
    pandas.Series -- the row whose title appears in *filename*; if none
    matches, the last row (index -1), as in the original implementation.
    """
    url = 'https://scihub.copernicus.eu/dhus'
    info = filename.split('_')
    satellite = info[0]  # parsed for readability; not used in the query
    mode = info[1]
    product = info[2]
    # FIX: `np.int` was removed in NumPy 1.24; the builtin int() is the
    # exact drop-in replacement here.
    orbitnumber = int(info[7])
    time_start = int(info[5].split('T')[0]) - 1
    time_end = str(time_start + 2)
    time_start = str(time_start)
    api = SentinelAPI(username, password, url)
    products = api.query(
        beginposition=(time_start, time_end),
        platformname='Sentinel-1',
        producttype=product,
        sensoroperationalmode=mode,
        polarisationmode='VV VH',
        orbitnumber=orbitnumber
    )
    products_df = api.to_dataframe(products)
    index = -1
    for i in range(len(products_df)):
        if products_df['title'][i] in filename:
            index = i
    return products_df.iloc[index]
def test_SentinelAPI_wrong_credentials():
    """Bad credentials must surface as a SentinelAPIError with HTTP 401."""
    api = SentinelAPI("wrong_user", "wrong_password")
    with pytest.raises(SentinelAPIError) as excinfo:
        api.query(**_small_query)
    assert excinfo.value.http_status == 401
def test_format_url():
    """format_url must build and cache the search URL (max_rows variant)."""
    api = SentinelAPI(**_api_kwargs)
    first_row = 0
    result = api.format_url(start_row=first_row)
    # format_url both returns the URL and stores it on the instance.
    assert result is api.url
    expected = ('https://scihub.copernicus.eu/apihub/search?format=json'
                '&rows={rows}&start={start}').format(rows=api.max_rows,
                                                     start=first_row)
    assert api.url == expected
def test_format_url():
    """format_url must build and cache the search URL (page_size variant)."""
    api = SentinelAPI(**_api_kwargs)
    first_row = 0
    result = api.format_url(start_row=first_row)
    # format_url both returns the URL and stores it on the instance.
    assert result is api.url
    expected = ('https://scihub.copernicus.eu/apihub/search?format=json'
                '&rows={rows}&start={start}').format(rows=api.page_size,
                                                     start=first_row)
    assert api.url == expected
def test_format_url_custom_api_url():
    """format_url must honour a custom api_url, with or without a slash."""
    for base in ('https://scihub.copernicus.eu/dhus/',
                 'https://scihub.copernicus.eu/dhus'):
        api = SentinelAPI("user", "pw", api_url=base)
        assert api.format_url().startswith('https://scihub.copernicus.eu/dhus/search')
def test_large_query():
    """A year-long query must paginate past max_rows results."""
    api = SentinelAPI(**_api_kwargs)
    api.query(**_large_query)
    expected = (
        '(beginPosition:[2015-01-01T00:00:00Z TO 2015-12-31T00:00:00Z]) '
        'AND (footprint:"Intersects(POLYGON((0 0,0 10,10 10,10 0,0 0)))")')
    assert api.last_query == expected
    assert api.last_status_code == 200
    assert len(api.products) > api.max_rows
def test_large_query():
    """A year-long query must paginate past one page_size of results."""
    api = SentinelAPI(**_api_kwargs)
    api.query(**_large_query)
    expected = (
        '(beginPosition:[2015-01-01T00:00:00Z TO 2015-12-31T00:00:00Z]) '
        'AND (footprint:"Intersects(POLYGON((0 0,0 10,10 10,10 0,0 0)))")')
    assert api.last_query == expected
    assert api.last_status_code == 200
    assert len(api.products) > api.page_size
def test_to_pandas():
    """Query results should convert to a DataFrame indexed by title."""
    api = SentinelAPI(**_api_auth)
    found = api.query(get_coordinates('tests/map.geojson'),
                      "20151219", "20151228",
                      platformname="Sentinel-2")
    frame = api.to_dataframe(found)
    expected_title = ('S2A_OPER_PRD_MSIL1C_PDMC_20151228T112701_R110_'
                      'V20151227T142229_20151227T142229')
    assert expected_title in frame.index
def test_SentinelAPI_connection():
    """A plain connection + query must set url, last_query and status 200."""
    api = SentinelAPI(environ['SENTINEL_USER'], environ['SENTINEL_PASSWORD'])
    api.query('0 0,1 1,0 1,0 0', datetime(2015, 1, 1), datetime(2015, 1, 2))
    assert api.url == 'https://scihub.copernicus.eu/apihub/search?format=json&rows=15000'
    expected_query = (
        '(beginPosition:[2015-01-01T00:00:00Z TO 2015-01-02T00:00:00Z]) '
        'AND (footprint:"Intersects(POLYGON((0 0,1 1,0 1,0 0)))")')
    assert api.last_query == expected_query
    assert api.content.status_code == 200
def download_scenes(period):
    """Fetch and process all Sentinel-1 GRD scenes for *period*.

    Queries SciHub over the AOI, downloads any zips not already on disk,
    processes each product in parallel, then builds the median composite
    and derived outputs for the period.
    """
    date_from = period.date_from
    date_to = period.date_to

    # Skip all work when the mosaic for this period already exists.
    scene_filename = 's1_{dfrom}_{dto}.tif'.format(
        dfrom=period.date_from.strftime('%Y%m'),
        dto=period.date_to.strftime('%Y%m'))
    scene_path = os.path.join(RESULTS_PATH, scene_filename)
    if os.path.exists(scene_path):
        print(
            "Sentinel-1 mosaic for period {}-{} already done:".format(
                date_from, date_to), scene_path)
        return

    # Prepare API client for download
    api = SentinelAPI(settings.SCIHUB_USER, settings.SCIHUB_PASS,
                      settings.SCIHUB_URL)

    # Query scenes intersecting the area of interest.
    footprint = geojson_to_wkt(read_geojson(AOI_PATH))
    found = api.query(footprint,
                      date=(date_from, date_to),
                      platformname='Sentinel-1',
                      producttype='GRD',
                      polarisationmode='VV VH',
                      orbitdirection='ASCENDING')
    for uuid, meta in found.items():
        print((uuid, meta['summary']))

    os.makedirs(S1_RAW_PATH, exist_ok=True)

    # Only download products whose zip is not already on disk.
    pending = {
        uuid: meta
        for uuid, meta in found.items()
        if not os.path.exists(
            os.path.join(S1_RAW_PATH, '{}.zip'.format(meta['title'])))
    }
    results = api.download_all(pending, directory_path=S1_RAW_PATH)

    # Process the images of each product in parallel.
    products = list(found.values())
    with mp.Pool(settings.S1_PROC_NUM_JOBS) as pool:
        pool.map(process_product, products)

    # Median composite per band, extra bands, concatenation, clip, cleanup.
    superimpose(products)
    median(products, period)
    generate_vvvh(period)
    concatenate_results(period)
    clip_result(period)
    clean_temp_files(period)
def s5down(datum=None, product_type='no2'):
    """Download Sentinel-5P data of *product_type* for one date and build a
    merged, cropped GeoTIFF for the Czech Republic.

    e.g. s5down('2019-08-15', 'no2')

    Parameters
    ----------
    datum : 'YYYY-MM-DD' string; defaults to today's date, resolved at call
        time. (FIX: the default used to be computed once at import time via
        ``datum=str(dt.date.today())``, so long-running processes kept using
        a stale date.)
    product_type : 'no2' or 'aerosols'.

    Returns
    -------
    list of generated per-product geotiff paths, or None when conversion
    produced nothing.
    """
    if datum is None:
        datum = str(dt.date.today())
    if product_type == 'no2':
        strtype = 'L2__NO2___'
    elif product_type == 'aerosols':
        strtype = 'L2__AER_AI'
    else:
        # FIX: an unknown product_type previously crashed later with a
        # NameError on `strtype`; fail early with a clear message instead.
        raise ValueError('unknown product_type: {}'.format(product_type))
    datum = dt.datetime.strptime(datum, '%Y-%m-%d').date()
    time_in = dt.datetime.combine(datum, dt.time(0, 0))
    time_out = dt.datetime.combine(datum, dt.time(23, 59))
    api = SentinelAPI('s5pguest', 's5pguest', 'https://s5phub.copernicus.eu/dhus')
    # coordinates for CZ:
    footprint = 'POLYGON((12.278971773041526 48.69059060056844,18.98957262575027 48.69059060056844,18.98957262575027 51.081759060281655,12.278971773041526 51.081759060281655,12.278971773041526 48.69059060056844))'
    products = api.query(footprint, date=(time_in, time_out),
                         platformname='Sentinel-5', producttype=strtype)
    print('there are ' + str(len(products)) + ' products found')
    a = api.download_all(products)
    geotiffs = []
    for product_ID in iter(products):
        product_path = a[0][product_ID]['path']
        print('converting ' + product_path + ' to geotiff')
        geotiffs.append(s5p2geotiff(product_path, product_type))
    if not geotiffs:
        print('some error happened, no geotiffs generated')
        clean_downloaded(products, a)
        return None
    tifstring = ''
    for tif in geotiffs:
        tifstring = tifstring + ' ' + tif
    print('merging geotiffs to ' + str(datum) + '.tif and cropping for CZ extents')
    outfile = str(datum) + '.' + product_type + '.tif'
    tmpfile = 'tmp.tif'
    # NOTE(review): commands are built by string concatenation from
    # internally generated file names; quoting would still be safer.
    os.system(
        'gdal_merge.py -o merged.tif -of GTiff -ul_lr 11.3867 51.4847 19.943 47.7933 -a_nodata 9999 '
        + tifstring)
    if product_type == 'no2':
        # need to compute 1000x
        gdal_calc = 'gdal_calc.py -A merged.tif --outfile=' + tmpfile + ' --calc="(A*1000 > 0)*(A * 1000 < 0.7)*(A * 1000)" --overwrite'
        print(gdal_calc)
        os.system(gdal_calc)
    else:
        tmpfile = 'merged.tif'
    # now oversample using cubic..
    gdalwarp = 'gdalwarp -tr 0.015 0.015 -r cubicspline -dstnodata 9999 -srcnodata 9999 ' + tmpfile + ' ' + outfile
    print(gdalwarp)
    os.system(gdalwarp)
    print('(the file will be also saved as {}.tif)'.format(product_type))
    copyfile(outfile, '../data/' + product_type + '.tif')
    # cleaning
    clean_downloaded(products, a)
    return geotiffs
def test_get_products_size():
    """Total size of the S2 query results must match the known value."""
    api = SentinelAPI(environ.get('SENTINEL_USER'),
                      environ.get('SENTINEL_PASSWORD'))
    api.query(get_coordinates('tests/map.geojson'),
              "20151219", "20151228",
              platformname="Sentinel-2")
    assert api.get_products_size() == 63.58
def test_s2_cloudcover():
    """Cloud-cover filter must narrow the S2 query to the three known ids."""
    api = SentinelAPI(**_api_auth)
    hits = api.query(get_coordinates('tests/map.geojson'),
                     "20151219", "20151228",
                     platformname="Sentinel-2",
                     cloudcoverpercentage="[0 TO 10]")
    assert len(hits) == 3
    assert hits[0]["id"] == "6ed0b7de-3435-43df-98bf-ad63c8d077ef"
    assert hits[1]["id"] == "37ecee60-23d8-4ec2-a65f-2de24f51d30e"
    assert hits[2]["id"] == "0848f6b8-5730-4759-850e-fc9945d42296"
def test_SentinelAPI_connection():
    """The query must embed the Solr string in api.url and return 200."""
    api = SentinelAPI(environ.get('SENTINEL_USER'),
                      environ.get('SENTINEL_PASSWORD'))
    api.query('0 0,1 1,0 1,0 0', datetime(2015, 1, 1), datetime(2015, 1, 2))
    expected_url = (
        'https://scihub.copernicus.eu/apihub/search?format=json&rows=15000'
        + '&q=(beginPosition:[2015-01-01T00:00:00Z TO 2015-01-02T00:00:00Z]) '
        + 'AND (footprint:"Intersects(POLYGON((0 0,1 1,0 1,0 0)))")')
    assert api.url == expected_url
    assert api.content.status_code == 200
def test_to_dict():
    """Query results should convert to a plain dict keyed by title."""
    api = SentinelAPI(**_api_auth)
    found = api.query(get_coordinates('tests/map.geojson'),
                      "20151219", "20151228",
                      platformname="Sentinel-2")
    dictionary = api.to_dict(found)
    # check the type
    assert isinstance(dictionary, dict)
    # check if dictionary has id key
    assert ('S2A_OPER_PRD_MSIL1C_PDMC_20151228T112701_R110_'
            'V20151227T142229_20151227T142229') in dictionary
def test_get_product_info_scihub_down():
    """A 503 with a non-JSON body must raise ValueError."""
    api = SentinelAPI("mock_user", "mock_password")
    with requests_mock.mock() as rqst:
        rqst.get(
            "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/?$format=json",
            text="Mock SciHub is Down",
            status_code=503)
        with pytest.raises(ValueError) as val_err:
            api.get_product_info('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        # NOTE(review): ValueError has no `.message` attribute on Python 3;
        # this mirrors the original assertion unchanged.
        assert val_err.value.message == "Invalid API response. JSON decoding failed."
def test_SentinelAPI_connection():
    """A small query must set the url prefix, last_query and status 200."""
    api = SentinelAPI(**_api_auth)
    api.query(**_small_query)
    prefix = ('https://scihub.copernicus.eu/apihub/search?format=json'
              '&rows={rows}').format(rows=api.max_rows)
    assert api.url.startswith(prefix)
    expected_query = (
        '(beginPosition:[2015-01-01T00:00:00Z TO 2015-01-02T00:00:00Z]) '
        'AND (footprint:"Intersects(POLYGON((0 0,1 1,0 1,0 0)))")')
    assert api.last_query == expected_query
    assert api.last_status_code == 200
def test_footprints():
    """get_footprints must match the recorded GeoJSON for the S1 GRD query."""
    api = SentinelAPI(
        environ.get('SENTINEL_USER'),
        environ.get('SENTINEL_PASSWORD')
    )
    api.query(get_coordinates('tests/map.geojson'),
              datetime(2014, 10, 10), datetime(2014, 12, 31),
              producttype="GRD")
    # FIX: the expected-footprints file was opened without ever being
    # closed; use a context manager so the handle is released promptly.
    with open('tests/expected_search_footprints.geojson', 'r') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(api.get_footprints()) == set(expected_footprints)
def test_footprints_s2():
    """S2 footprints must match the recorded expected GeoJSON."""
    api = SentinelAPI(**_api_auth)
    api.query(get_coordinates('tests/map.geojson'),
              "20151219", "20151228",
              platformname="Sentinel-2")
    with open('tests/expected_search_footprints_s2.geojson', 'r') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(api.get_footprints()) == set(expected_footprints)
def test_footprints_s1():
    """S1 GRD footprints must match the recorded expected GeoJSON."""
    api = SentinelAPI(**_api_auth)
    api.query(get_coordinates('tests/map.geojson'),
              datetime(2014, 10, 10), datetime(2014, 12, 31),
              producttype="GRD")
    with open('tests/expected_search_footprints_s1.geojson', 'r') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(api.get_footprints()) == set(expected_footprints)
def test_footprints_s2():
    """S2 footprints (env-credential variant) must match the recording."""
    api = SentinelAPI(environ['SENTINEL_USER'], environ['SENTINEL_PASSWORD'])
    api.query(get_coordinates('tests/map.geojson'),
              "20151219", "20151228",
              platformname="Sentinel-2")
    with open('tests/expected_search_footprints_s2.geojson', 'r') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(api.get_footprints()) == set(expected_footprints)
def test_footprints_s1():
    """S1 GRD footprints (env-credential variant) must match the recording."""
    api = SentinelAPI(environ['SENTINEL_USER'], environ['SENTINEL_PASSWORD'])
    api.query(get_coordinates('tests/map.geojson'),
              datetime(2014, 10, 10), datetime(2014, 12, 31),
              producttype="GRD")
    with open('tests/expected_search_footprints_s1.geojson', 'r') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(api.get_footprints()) == set(expected_footprints)
def test_trail_slash_base_url():
    """_url_trail_slash must normalise base URLs to end with '/'."""
    expected = 'https://scihub.copernicus.eu/dhus/'
    for candidate in ('https://scihub.copernicus.eu/dhus/',
                      'https://scihub.copernicus.eu/dhus'):
        assert SentinelAPI._url_trail_slash(candidate) == expected
        api = SentinelAPI("mock_user", "mock_password", candidate)
        assert api.api_url == expected
def test_footprints_s2():
    """to_geojson output for the S2 query must match the recording."""
    api = SentinelAPI(**_api_auth)
    found = api.query(get_coordinates('tests/map.geojson'),
                      "20151219", "20151228",
                      platformname="Sentinel-2")
    with open('tests/expected_search_footprints_s2.geojson', 'r') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(api.to_geojson(found)) == set(expected_footprints)
def test_get_products_invalid_json():
    """A malformed JSON body must raise SentinelAPIError with a clear msg."""
    api = SentinelAPI("mock_user", "mock_password")
    with requests_mock.mock() as rqst:
        rqst.post('https://scihub.copernicus.eu/apihub/search?format=json',
                  text="{Invalid JSON response",
                  status_code=200)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.query(area=get_coordinates("tests/map.geojson"),
                      initial_date="20151219",
                      end_date="20151228",
                      platformname="Sentinel-2")
        assert excinfo.value.msg == "API response not valid. JSON decoding failed."
def __init__(self, date_start, date_finish, footprint=settings.footprint,
             platformname="Sentinel-3"):
    """Store the query window, connect to SciHub and run the initial query.

    NOTE(review): `platformname` is accepted but not stored or used here —
    presumably consumed elsewhere; confirm before relying on it.
    """
    self.date_start = date_start
    self.date_finish = date_finish
    self.api = SentinelAPI(settings.sentinel_api_user,
                           settings.sentinel_api_key,
                           'https://scihub.copernicus.eu/dhus')
    self.wkt_footprint = footprint
    self.products = self.query_products(self.date_start, self.date_finish)
def download(user, password, productid, path, md5, url):
    """Download a Sentinel Product. It just needs your SciHub user and
    password and the id of the product you want to download.
    """
    api = SentinelAPI(user, password, url)
    try:
        api.download(productid, path, md5)
    except SentinelAPIError as e:
        # An 'Invalid key' message means the id does not exist server-side.
        if 'Invalid key' in e.msg:
            logger.error('No product with ID \'%s\' exists on server', productid)
        else:
            raise
def _search_on_hub(user, password, hub_address, **search_keywords):
    """Run one query against a hub; return {} when the hub errors out."""
    try:
        # Echo the formatted query for debugging before hitting the hub.
        print(SentinelAPI.format_query(**search_keywords))
        api = SentinelAPI(user, password, hub_address)
        found = api.query(**search_keywords)
    except SentinelAPIError as e:
        print(e)
        print(SentinelAPI.format_query(**search_keywords))
        found = {}
    return found
def download(user, password, productid, path, md5, url):
    """Download a Sentinel Product with your Copernicus Open Access Hub user
    and password and the id of the product you want to download.
    """
    api = SentinelAPI(user, password, url)
    try:
        api.download(productid, path, md5)
    except SentinelAPIError as e:
        # An 'Invalid key' message means the id does not exist server-side.
        if 'Invalid key' in e.msg:
            logger.error('No product with ID \'%s\' exists on server', productid)
        else:
            raise
def test_get_product_info():
    """get_product_info must return the full metadata dict (lat/lon order)."""
    api = SentinelAPI(environ['SENTINEL_USER'], environ['SENTINEL_PASSWORD'])
    expected_s1 = {
        'id': '8df46c9e-a20c-43db-a19a-4240c2ed3b8b',
        'size': 143549851,
        'md5': 'D5E4DF5C38C6E97BF7E7BD540AB21C05',
        'url': "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/$value",
        'date': '2015-11-21T10:03:56Z',
        'footprint': '-5.880887 -63.852531,-5.075419 -67.495872,-3.084356 -67.066071,-3.880541 -63.430576,'
                     '-5.880887 -63.852531',
        'title': 'S1A_EW_GRDM_1SDV_20151121T100356_20151121T100429_008701_00C622_A0EC',
    }
    expected_s2 = {
        'date': '2015-12-27T14:22:29Z',
        'footprint': '-4.565257232533263 -58.80274769505742,-5.513960396525286 -58.80535376268811,-5.515947033626909 '
                     '-57.90315169909761,-5.516014389089381 -57.903151791669515,-5.516044812342758 -57.85874693129081,'
                     '-5.516142631941845 -57.814323596961835,-5.516075248310466 -57.81432351345917,-5.516633044843839 '
                     '-57.00018056571297,-5.516700066819259 -57.000180565731384,-5.51666329264377 -56.95603179187787,'
                     '-5.516693539799448 -56.91188395837315,-5.51662651925904 -56.91188396736038,-5.515947927683427 '
                     '-56.097209386295305,-5.516014937246069 -56.09720929423562,-5.5159111504805916 '
                     '-56.053056977999596,-5.515874390220655 -56.00892491028779,-5.515807411549814 -56.00892501130261,'
                     '-5.513685455771881 -55.10621586418906,-4.6092845892233 -55.108821882251775,-4.606372862374043 '
                     '-54.20840287327946,-3.658594390979672 -54.21169990975238,-2.710949551849636 -54.214267703869346,'
                     '-2.7127451087194463 -55.15704255065496,-2.71378646425769 -56.0563616875051,-2.7141556791285275 '
                     '-56.9561852630143,-2.713837142510183 -57.8999998009875,-3.6180222056692726 -57.90079161941062,'
                     '-3.616721351843382 -58.800616247288836,-4.565257232533263 -58.80274769505742',
        'id': '44517f66-9845-4792-a988-b5ae6e81fd3e',
        'md5': '48C5648C2644CE07207B3C943DEDEB44',
        'size': 5854429622,
        'title': 'S2A_OPER_PRD_MSIL1C_PDMC_20151228T112523_R110_V20151227T142229_20151227T142229',
        'url': "https://scihub.copernicus.eu/apihub/odata/v1/Products('44517f66-9845-4792-a988-b5ae6e81fd3e')/$value",
    }
    assert api.get_product_info('8df46c9e-a20c-43db-a19a-4240c2ed3b8b') == expected_s1
    assert api.get_product_info('44517f66-9845-4792-a988-b5ae6e81fd3e') == expected_s2
class Watcher:
    """Polls ESA for new SLC/GRD products, filters out ones already known
    to ASF or matching no Hyp3 subscription, and records the remainder."""

    def __init__(self, location):
        self.options = Options(location)
        self.sql = Esa_Sql(self.options)
        self.api = SentinelAPI(self.options.user, self.options.password)
        self.products = []
        check_and_clean_log_file()

    def watch(self):
        """Run one full watch cycle; always clears the running flag."""
        try:
            self.find_candidate_products()
            self.filter_for_unknown_products()
            self.filter_for_subscription_intersection()
            self.insert_products_in_db()
        finally:
            self.options.set_running('0')
            log.info("Done")

    def find_candidate_products(self):
        """Query ESA for SLC and GRD products ingested since the last run."""
        log.info("Finding candidate products at ESA")
        self.candidate_products = self.api.query(
            limit=self.options.num_back,
            producttype="SLC",
            ingestiondate=(f"{self.options.last_search_time}Z",
                           datetime.isoformat(datetime.now()) + "Z"))
        self.candidate_products.update(self.api.query(
            limit=self.options.num_back,
            producttype="GRD",
            ingestiondate=(f"{self.options.last_search_time}Z",
                           datetime.isoformat(datetime.now()) + "Z")))
        self.options.update_last_search_time()
        log.info("Inspecting {} products".format(len(self.candidate_products)))

    def filter_for_unknown_products(self):
        """Keep only candidates that ASF's database does not know yet."""
        for uuid, meta in self.candidate_products.items():
            if not self.sql.check_pg_db_for_product(meta['identifier']):
                self.products.append(
                    {'granule': meta['identifier'],
                     'url': meta['link_icon'],
                     'location': meta['footprint']})
                # FIX: the original logged product['identifier'] where
                # `product` was the dict *key* (a uuid string), raising
                # TypeError; log the metadata identifier instead.
                log.info(f"{meta['identifier']} is unknown to ASF")

    def filter_for_subscription_intersection(self):
        """Drop products intersecting no Hyp3 subscription."""
        # FIX: the original removed items from self.products while iterating
        # over it, which skips elements; rebuild the list instead. It also
        # indexed product['identifier'], a key the dicts built above do not
        # have — they carry 'granule'.
        kept = []
        for product in self.products:
            if self.sql.check_hyp3_db_for_intersecting_subscription(product):
                kept.append(product)
            else:
                log.info(f"{product['granule']} did not match any Hyp3 subscriptions")
        self.products = kept

    def insert_products_in_db(self):
        """Persist the filtered products."""
        for product in self.products:
            self.sql.insert_product_in_db(product)
def test_get_product_info():
    """get_product_info must return the known metadata for the S1 product."""
    api = SentinelAPI(environ.get('SENTINEL_USER'),
                      environ.get('SENTINEL_PASSWORD'))
    expected = {
        'id': '079ed72f-b330-4918-afb8-b63854e375a5',
        'title': 'S1A_IW_GRDH_1SDV_20150527T081303_20150527T081328_006104_007EB2_E65B',
        'size': 1051461964,
        'footprint': '-21.032057 -39.925808,-20.472944 -42.301277,-18.975924 -41.904408,-19.528255 -39.549416,-21.032057 -39.925808',
        'url': "https://scihub.esa.int/dhus/odata/v1/Products('079ed72f-b330-4918-afb8-b63854e375a5')/$value",
    }
    assert api.get_product_info('079ed72f-b330-4918-afb8-b63854e375a5') == expected
def test_api_query_format():
    """format_query must combine date range, footprint and product type."""
    api = SentinelAPI("mock_user", "mock_password")
    now = datetime.now()
    last_24h = format_date(now - timedelta(hours=24))
    wkt = '0 0,1 1,0 1,0 0'

    result = api.format_query(wkt, end_date=now)
    assert result == ('(beginPosition:[%s TO %s]) ' % (last_24h, format_date(now))
                      + 'AND (footprint:"Intersects(POLYGON((0 0,1 1,0 1,0 0)))")')

    result = api.format_query(wkt, end_date=now, producttype='SLC')
    assert result == ('(beginPosition:[%s TO %s]) ' % (last_24h, format_date(now))
                      + 'AND (footprint:"Intersects(POLYGON((0 0,1 1,0 1,0 0)))") '
                      + 'AND (producttype:SLC)')
def test_SentinelAPI_connection():
    """A small query must set the url prefix (page_size) and status 200."""
    api = SentinelAPI(**_api_auth)
    api.query(**_small_query)
    prefix = ('https://scihub.copernicus.eu/apihub/search?format=json'
              '&rows={rows}').format(rows=api.page_size)
    assert api.url.startswith(prefix)
    expected_query = (
        '(beginPosition:[2015-01-01T00:00:00Z TO 2015-01-02T00:00:00Z]) '
        'AND (footprint:"Intersects(POLYGON((0 0,1 1,0 1,0 0)))")')
    assert api.last_query == expected_query
    assert api.last_status_code == 200
def async_product_worker(platform, product, date, auth, CATALOG_ROOT):
    """Query one platform/product for a single day and persist the result
    as a parquet file under CATALOG_ROOT.

    auth is a mapping with 'U' (user) and 'P' (password) entries.
    """
    api = SentinelAPI(auth['U'], auth['P'])
    hits = api.query(date=(date, date + timedelta(days=1)),
                     platformname=platform,
                     producttype=product)
    frame = pd.DataFrame.from_dict(hits, orient='index')
    out_name = '_'.join([platform, product, date.isoformat()[0:10]]) + '.parquet'
    frame.to_parquet(os.path.join(CATALOG_ROOT, out_name))
def test_s2_cloudcover():
    """Cloud-cover filter (env-credential variant) must yield the two ids."""
    api = SentinelAPI(environ.get('SENTINEL_USER'),
                      environ.get('SENTINEL_PASSWORD'))
    api.query(get_coordinates('tests/map.geojson'),
              "20151219", "20151228",
              platformname="Sentinel-2",
              cloudcoverpercentage="[0 TO 10]")
    hits = api.get_products()
    assert len(hits) == 2
    assert hits[0]["id"] == "37ecee60-23d8-4ec2-a65f-2de24f51d30e"
    assert hits[1]["id"] == "0848f6b8-5730-4759-850e-fc9945d42296"
def test_get_products_invalid_json():
    """A malformed JSON POST body must raise SentinelAPIError."""
    api = SentinelAPI("mock_user", "mock_password")
    with requests_mock.mock() as rqst:
        rqst.post('https://scihub.copernicus.eu/apihub/search?format=json',
                  text="{Invalid JSON response",
                  status_code=200)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.query(area=get_coordinates("tests/map.geojson"),
                      initial_date="20151219",
                      end_date="20151228",
                      platformname="Sentinel-2")
            api.get_products()
        assert excinfo.value.msg == "API response not valid. JSON decoding failed."
def test_get_products_invalid_json():
    """get_products() must raise ValueError when the server body is not JSON.

    The mocked GET returns plain text with HTTP 200, so JSON decoding fails
    when the products are materialised.
    """
    api = SentinelAPI("mock_user", "mock_password")
    with requests_mock.mock() as rqst:
        rqst.get(
            'https://scihub.copernicus.eu/apihub/search?format=json&rows=15000&q=(beginPosition:[2015-12-19T00:00:00Z TO 2015-12-28T00:00:00Z]) AND (footprint:"Intersects(POLYGON((-66.2695312 -8.0592296,-66.2695312 0.7031074,-57.3046875 0.7031074,-57.3046875 -8.0592296,-66.2695312 -8.0592296)))") AND (platformname:Sentinel-2)',
            text="Invalid JSON response",
            status_code=200
        )
        api.query(
            area=get_coordinates("tests/map.geojson"),
            initial_date="20151219",
            end_date="20151228",
            platformname="Sentinel-2"
        )
        with pytest.raises(ValueError) as val_err:
            api.get_products()
        # BUG FIX: Python 3 exceptions have no `.message` attribute (removed
        # after Python 2); use str() to read the exception text.
        assert str(val_err.value) == "API response not valid. JSON decoding failed."
def test_api_url_format():
    """format_url() builds the full apihub search URL, with optional clauses."""
    api = SentinelAPI(
        environ.get('SENTINEL_USER'),
        environ.get('SENTINEL_PASSWORD')
    )
    now = datetime.now()
    last_24h = format_date(now - timedelta(hours=24))
    prefix = 'https://scihub.copernicus.eu/apihub/search?format=json&rows=15000'
    base_query = (
        '&q=(beginPosition:[%s TO %s]) ' % (last_24h, format_date(now))
        + 'AND (footprint:"Intersects(POLYGON((0 0,1 1,0 1,0 0)))")'
    )

    # Area plus end date only.
    api.format_url('0 0,1 1,0 1,0 0', end_date=now)
    assert api.url == prefix + base_query

    # An extra keyword argument adds another AND clause to the URL.
    api.format_url('0 0,1 1,0 1,0 0', end_date=now, producttype='SLC')
    assert api.url == prefix + base_query + ' AND (producttype:SLC)'
def test_download(tmpdir):
    """End-to-end download test: fresh download, no-op re-download, resume of a
    partial file, and checksum failure detection.

    Requires live SciHub credentials in ``_api_auth``.
    """
    api = SentinelAPI(**_api_auth)
    uuid = "1f62a176-c980-41dc-b3a1-c735d660c910"
    filename = "S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E"
    expected_path = tmpdir.join(filename + ".zip")

    # Download normally
    path, product_info = api.download(uuid, str(tmpdir), checksum=True)
    assert expected_path.samefile(path)
    assert product_info["id"] == uuid
    assert product_info["title"] == filename
    assert product_info["size"] == expected_path.size()
    # FIX: renamed the local `hash` -> `md5sum`; it shadowed the builtin hash().
    md5sum = expected_path.computehash()
    modification_time = expected_path.mtime()
    expected_product_info = product_info

    # File exists, test with checksum — expect no modification
    path, product_info = api.download(uuid, str(tmpdir), check_existing=True)
    assert expected_path.mtime() == modification_time
    assert product_info == expected_product_info

    # File exists, test without checksum — expect no modification
    path, product_info = api.download(uuid, str(tmpdir), check_existing=False)
    assert expected_path.mtime() == modification_time
    assert product_info == expected_product_info

    # Create an invalid file of the right size, expect re-download
    with expected_path.open("wb") as f:
        f.seek(expected_product_info["size"] - 1)
        f.write(b'\0')
    assert expected_path.computehash("md5") != md5sum
    path, product_info = api.download(uuid, str(tmpdir), check_existing=True)
    assert expected_path.computehash("md5") == md5sum
    assert product_info == expected_product_info

    # Test continuing a truncated (partial) download
    with expected_path.open("rb") as f:
        content = f.read()
    with expected_path.open("wb") as f:
        f.write(content[:100])
    assert expected_path.computehash("md5") != md5sum
    path, product_info = api.download(uuid, str(tmpdir), check_existing=True)
    assert expected_path.computehash("md5") == md5sum
    assert product_info == expected_product_info

    # Test MD5 check: a corrupt file must raise InvalidChecksumError
    with expected_path.open("wb") as f:
        f.write(b'abcd' * 100)
    assert expected_path.computehash("md5") != md5sum
    with pytest.raises(InvalidChecksumError):
        api.download(uuid, str(tmpdir), check_existing=True, checksum=True)
def test_get_products_size():
    """get_products_size() reports total GB, rounding tiny products to zero."""
    api = SentinelAPI(**_api_auth)
    api.query(
        get_coordinates('tests/map.geojson'),
        "20151219", "20151228",
        platformname="Sentinel-2"
    )
    assert api.get_products_size() == 63.58

    # Reset the product list and load a single, very small product instead.
    api.products = []
    api.load_query("S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E")
    assert len(api.get_products()) > 0
    # Small enough that the reported size rounds down to zero.
    assert api.get_products_size() == 0
def query_and_create(self, start=None, end=None):
    '''Query the last scene available on period filtered by start time and
    end dates. By default start is current time and end is 7 days ago.

    Returns the list of newly created Scene objects (existing products are
    skipped).
    '''
    end = end or datetime.utcnow()
    start = start or datetime.utcnow() - timedelta(days=7)
    coords = ','.join([('%f %f' % coord) for coord in self.geom.coords[0]])
    scenes_created = []
    try:
        api = SentinelAPI(settings.SENTINEL_USER,
                          settings.SENTINEL_PASSWORD,
                          settings.SENTINEL_API_URL)
        api_url = settings.SENTINEL_API_URL
    except AttributeError:
        # settings.SENTINEL_API_URL is not configured; use the library default.
        api = SentinelAPI(settings.SENTINEL_USER, settings.SENTINEL_PASSWORD)
        api_url = '(default)'
    # BUG FIX: the fallback branch printed `ettings.SENTINEL_USER` (NameError)
    # and re-read settings.SENTINEL_API_URL — the very attribute whose absence
    # raised AttributeError — so it always crashed. Capture the URL per branch.
    # NOTE(review): consider not printing the password in production logs.
    print('sentinel initialized on %s, with %s - %s'
          % (api_url, settings.SENTINEL_USER, settings.SENTINEL_PASSWORD))
    print('sentinelsat query -s %s -e %s coords %s q %s'
          % (start, end, coords, self.query))
    if self.query:
        # self.query holds comma-separated key=value filter pairs.
        query = dict([i.split('=') for i in self.query.split(',')])
        api.query(coords, start, end, **query)
    else:
        api.query(coords, start, end)
    features = api.get_footprints()['features']
    print('%s features found' % len(features))
    for feature in features:
        product_id = feature['properties']['product_id']
        try:
            Scene.objects.get(product=product_id)
            print('Scene of product %s already exists' % product_id)
        except Scene.DoesNotExist:
            print('Creating scene with data: %s' % feature)
            scene = Scene.objects.create(
                product=product_id,
                identifier=feature['properties']['identifier'],
                date=datetime.strptime(
                    feature['properties']['date_beginposition'],
                    '%Y-%m-%dT%H:%M:%S.%fZ'
                ),
                polarisation=feature['properties']['polarisationmode'],
                orbit_direction=feature['properties']['orbitdirection'],
                sensor_mode=feature['properties']['sensoroperationalmode'],
                product_type=feature['properties']['producttype'],
                sat=feature['properties']['platformname'],
                geom=Polygon(feature['geometry']['coordinates'][0]))
            print('Scene of product %s created' % product_id)
            scenes_created.append(scene)
    return scenes_created
def test_download_all(tmpdir):
    """download_all() fetches every queried product and flags checksum failures."""
    api = SentinelAPI(**_api_auth)
    # From https://scihub.copernicus.eu/apihub/odata/v1/Products?$top=5&$orderby=ContentLength
    filenames = [
        "S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E",
        "S1A_WV_OCN__2SSV_20150526T211029_20150526T211737_006097_007E78_134A",
        "S1A_WV_OCN__2SSV_20150526T081641_20150526T082418_006090_007E3E_104C",
    ]
    api.load_query(" OR ".join(filenames))
    assert len(api.get_products()) == len(filenames)

    # Download normally: every file must exist with its reported size.
    result = api.download_all(str(tmpdir))
    assert len(result) == len(filenames)
    for downloaded_path, product_info in result.items():
        local_file = py.path.local(downloaded_path)
        assert local_file.purebasename in filenames
        assert local_file.check(exists=1, file=1)
        assert local_file.size() == product_info["size"]

    # Force one download to fail by corrupting its server-side checksum.
    path, product_info = list(result.items())[0]
    py.path.local(path).remove()
    with requests_mock.mock(real_http=True) as rqst:
        url = ("https://scihub.copernicus.eu/apihub/odata/v1/Products('%s')/?$format=json"
               % product_info["id"])
        json = api.session.get(url).json()
        json["d"]["Checksum"]["Value"] = "00000000000000000000000000000000"
        rqst.get(url, json=json)
        result = api.download_all(str(tmpdir), max_attempts=1, checksum=True)
        assert len(result) == len(filenames)
        # The corrupted product is reported as a failed (None) entry.
        assert result[path] is None
def test_SentinelAPI_wrong_credentials():
    """Invalid credentials yield HTTP 401 and make the result accessors fail."""
    api = SentinelAPI("wrong_user", "wrong_password")
    api.query('0 0,1 1,0 1,0 0',
              datetime(2015, 1, 1),
              datetime(2015, 1, 2))
    assert api.content.status_code == 401
    with pytest.raises(ValueError):
        api.get_products_size()
        api.get_products()
def search(
        user, password, geojson, start, end, download, md5, sentinel1,
        sentinel2, cloud, footprints, path, query, url):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """
    api = SentinelAPI(user, password, url)

    # Build the platform/cloud filter; a cloud limit implies Sentinel-2.
    search_kwargs = {}
    if cloud:
        search_kwargs["platformname"] = "Sentinel-2"
        search_kwargs["cloudcoverpercentage"] = "[0 TO %s]" % cloud
    elif sentinel2:
        search_kwargs["platformname"] = "Sentinel-2"
    elif sentinel1:
        search_kwargs["platformname"] = "Sentinel-1"
    if query is not None:
        # --query accepts comma-separated key=value pairs.
        search_kwargs.update(dict([i.split('=') for i in query.split(',')]))

    api.query(get_coordinates(geojson), start, end, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.get_footprints()
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        result = api.download_all(path, checksum=md5)
        if md5 is True:
            # NOTE(review): entries whose info `is not None` are recorded as
            # corrupt here — confirm the polarity against download_all()'s
            # return contract before relying on corrupt_scenes.txt.
            corrupt_scenes = [(p, info["id"])
                              for p, info in result.items() if info is not None]
            if len(corrupt_scenes) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for corrupt_tuple in corrupt_scenes:
                        outfile.write("%s : %s\n" % corrupt_tuple)
    else:
        for product in api.get_products():
            print('Product %s - %s' % (product['id'], product['summary']))
        print('---')
        print(
            '%s scenes found with a total size of %.2f GB'
            % (len(api.get_products()), api.get_products_size()))
def test_trail_slash_base_url():
    """Base URLs are normalised to always carry a trailing slash."""
    expected = 'https://scihub.copernicus.eu/dhus/'
    candidates = [
        'https://scihub.copernicus.eu/dhus/',
        'https://scihub.copernicus.eu/dhus'
    ]
    for candidate in candidates:
        # Both the static helper and a constructed API agree on the result.
        assert SentinelAPI._url_trail_slash(candidate) == expected
        api = SentinelAPI(
            environ.get('SENTINEL_USER'),
            environ.get('SENTINEL_PASSWORD'),
            candidate
        )
        assert api.api_url == expected
def test_s2_cloudcover():
    """Cloud-cover filtered Sentinel-2 query returns the three known products."""
    api = SentinelAPI(**_api_auth)
    api.query(
        get_coordinates('tests/map.geojson'),
        "20151219", "20151228",
        platformname="Sentinel-2",
        cloudcoverpercentage="[0 TO 10]"
    )
    products = api.get_products()
    assert len(products) == 3
    expected_ids = [
        "6ed0b7de-3435-43df-98bf-ad63c8d077ef",
        "37ecee60-23d8-4ec2-a65f-2de24f51d30e",
        "0848f6b8-5730-4759-850e-fc9945d42296",
    ]
    for found, expected in zip(products, expected_ids):
        assert found["id"] == expected
def search(user, password, geojson, start, end, download, footprints, path, query):
    """Search for Sentinel-1 products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """
    api = SentinelAPI(user, password)

    # Extra filters arrive as comma-separated key=value pairs.
    if query is not None:
        extra_filters = dict([i.split('=') for i in query.split(',')])
        api.query(get_coordinates(geojson), start, end, **extra_filters)
    else:
        api.query(get_coordinates(geojson), start, end)

    if footprints is True:
        footprints_geojson = api.get_footprints()
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        api.download_all(path)
    else:
        for product in api.get_products():
            print('Product %s - %s' % (product['id'], product['summary']))