def test_get_product_odata_scihub_down():
    api = SentinelAPI("mock_user", "mock_password")
    request_url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')?$format=json"

    with requests_mock.mock() as rqst:
        rqst.get(
            request_url,
            text="Mock SciHub is Down", status_code=503
        )
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert excinfo.value.msg == "Mock SciHub is Down"

        rqst.get(
            request_url,
            text='{"error":{"code":null,"message":{"lang":"en","value":'
                 '"No Products found with key \'8df46c9e-a20c-43db-a19a-4240c2ed3b8b\' "}}}',
            status_code=500
        )
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert excinfo.value.msg == "No Products found with key '8df46c9e-a20c-43db-a19a-4240c2ed3b8b' "

        rqst.get(
            request_url,
            text="Mock SciHub is Down", status_code=200
        )
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert excinfo.value.msg == "Mock SciHub is Down"

        # Test with a real "server under maintenance" response
        rqst.get(
            request_url,
            text=textwrap.dedent("""\
                <!doctype html>
                <title>The Sentinels Scientific Data Hub</title>
                <link href='https://fonts.googleapis.com/css?family=Open+Sans' rel='stylesheet' type='text/css'>
                <style>
                body { text-align: center; padding: 125px; background: #fff;}
                h1 { font-size: 50px; }
                body { font: 20px 'Open Sans',Helvetica, sans-serif; color: #333; }
                article { display: block; text-align: left; width: 820px; margin: 0 auto; }
                a { color: #0062a4; text-decoration: none; font-size: 26px }
                a:hover { color: #1b99da; text-decoration: none; }
                </style>
                <article>
                <img alt="" src="/datahub.png" style="float: left;margin: 20px;">
                <h1>The Sentinels Scientific Data Hub will be back soon!</h1>
                <div style="margin-left: 145px;">
                <p>
                Sorry for the inconvenience,<br/>
                we're performing some maintenance at the moment.<br/>
                </p>
                <!--<p><a href="https://scihub.copernicus.eu/news/News00098">https://scihub.copernicus.eu/news/News00098</a></p>-->
                <p>
                We'll be back online shortly!
                </p>
                </div>
                </article>
                """),
            status_code=502)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert "The Sentinels Scientific Data Hub will be back soon!" in excinfo.value.msg
def test_invalid_query():
    api = SentinelAPI(**_api_auth)
    with pytest.raises(SentinelAPIError) as excinfo:
        api.query(raw="xxx:yyy")
def test_get_product_odata_full():
    api = SentinelAPI(**_api_auth)

    expected_full = {
        '8df46c9e-a20c-43db-a19a-4240c2ed3b8b': {
            'id': '8df46c9e-a20c-43db-a19a-4240c2ed3b8b',
            'title': 'S1A_EW_GRDM_1SDV_20151121T100356_20151121T100429_008701_00C622_A0EC',
            'size': 143549851,
            'md5': 'D5E4DF5C38C6E97BF7E7BD540AB21C05',
            'date': datetime(2015, 11, 21, 10, 3, 56, 675000),
            'footprint': 'POLYGON((-63.852531 -5.880887,-67.495872 -5.075419,-67.066071 -3.084356,-63.430576 -3.880541,-63.852531 -5.880887))',
            'url': "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/$value",
            'Acquisition Type': 'NOMINAL',
            'Carrier rocket': 'Soyuz',
            'Cycle number': 64,
            'Date': datetime(2015, 11, 21, 10, 3, 56, 675000),
            'Filename': 'S1A_EW_GRDM_1SDV_20151121T100356_20151121T100429_008701_00C622_A0EC.SAFE',
            'Footprint': '<gml:Polygon srsName="http://www.opengis.net/gml/srs/epsg.xml#4326" xmlns:gml="http://www.opengis.net/gml"> <gml:outerBoundaryIs> <gml:LinearRing> <gml:coordinates>-5.880887,-63.852531 -5.075419,-67.495872 -3.084356,-67.066071 -3.880541,-63.430576 -5.880887,-63.852531</gml:coordinates> </gml:LinearRing> </gml:outerBoundaryIs></gml:Polygon>',
            'Format': 'SAFE',
            'Identifier': 'S1A_EW_GRDM_1SDV_20151121T100356_20151121T100429_008701_00C622_A0EC',
            'Ingestion Date': datetime(2015, 11, 21, 13, 22, 4, 992000),
            'Instrument': 'SAR-C',
            'Instrument abbreviation': 'SAR-C SAR',
            'Instrument description': '<a target="_blank" href="https://sentinel.esa.int/web/sentinel/missions/sentinel-1">https://sentinel.esa.int/web/sentinel/missions/sentinel-1</a>',
            'Instrument description text': 'The SAR Antenna Subsystem (SAS) is developed and build by AstriumGmbH. It is a large foldable planar phased array antenna, which isformed by a centre panel and two antenna side wings. In deployedconfiguration the antenna has an overall aperture of 12.3 x 0.84 m.The antenna provides a fast electronic scanning capability inazimuth and elevation and is based on low loss and highly stablewaveguide radiators build in carbon fibre technology, which arealready successfully used by the TerraSAR-X radar imaging mission.The SAR Electronic Subsystem (SES) is developed and build byAstrium Ltd. It provides all radar control, IF/ RF signalgeneration and receive data handling functions for the SARInstrument. The fully redundant SES is based on a channelisedarchitecture with one transmit and two receive chains, providing amodular approach to the generation and reception of wide-bandsignals and the handling of multi-polarisation modes. One keyfeature is the implementation of the Flexible Dynamic BlockAdaptive Quantisation (FD-BAQ) data compression concept, whichallows an efficient use of on-board storage resources and minimisesdownlink times.',
            'Instrument mode': 'EW',
            'Instrument name': 'Synthetic Aperture Radar (C-band)',
            'Instrument swath': 'EW',
            'JTS footprint': 'POLYGON ((-63.852531 -5.880887,-67.495872 -5.075419,-67.066071 -3.084356,-63.430576 -3.880541,-63.852531 -5.880887))',
            'Launch date': 'April 3rd, 2014',
            'Mission datatake id': 50722,
            'Mission type': 'Earth observation',
            'Mode': 'EW',
            'NSSDC identifier': '0000-000A',
            'Operator': 'European Space Agency',
            'Orbit number (start)': 8701,
            'Orbit number (stop)': 8701,
            'Pass direction': 'DESCENDING',
            'Phase identifier': 1,
            'Polarisation': 'VV VH',
            'Product class': 'S',
            'Product class description': 'SAR Standard L1 Product',
            'Product composition': 'Slice',
            'Product level': 'L1',
            'Product type': 'GRD',
            'Relative orbit (start)': 54,
            'Relative orbit (stop)': 54,
            'Resolution': 'Medium',
            'Satellite': 'Sentinel-1',
            'Satellite description': '<a target="_blank" href="https://sentinel.esa.int/web/sentinel/missions/sentinel-1">https://sentinel.esa.int/web/sentinel/missions/sentinel-1</a>',
            'Satellite name': 'Sentinel-1',
            'Satellite number': 'A',
            'Sensing start': datetime(2015, 11, 21, 10, 3, 56, 675000),
            'Sensing stop': datetime(2015, 11, 21, 10, 4, 29, 714000),
            'Size': '223.88 MB',
            'Slice number': 1,
            'Start relative orbit number': 54,
            'Status': 'ARCHIVED',
            'Stop relative orbit number': 54,
            'Timeliness Category': 'Fast-24h'
        },
        '44517f66-9845-4792-a988-b5ae6e81fd3e': {
            'id': '44517f66-9845-4792-a988-b5ae6e81fd3e',
            'title': 'S2A_OPER_PRD_MSIL1C_PDMC_20151228T112523_R110_V20151227T142229_20151227T142229',
            'size': 5854429622,
            'md5': '48C5648C2644CE07207B3C943DEDEB44',
            'date': datetime(2015, 12, 27, 14, 22, 29),
            'footprint': 'POLYGON((-58.80274769505742 -4.565257232533263,-58.80535376268811 -5.513960396525286,-57.90315169909761 -5.515947033626909,-57.903151791669515 -5.516014389089381,-57.85874693129081 -5.516044812342758,-57.814323596961835 -5.516142631941845,-57.81432351345917 -5.516075248310466,-57.00018056571297 -5.516633044843839,-57.000180565731384 -5.516700066819259,-56.95603179187787 -5.51666329264377,-56.91188395837315 -5.516693539799448,-56.91188396736038 -5.51662651925904,-56.097209386295305 -5.515947927683427,-56.09720929423562 -5.516014937246069,-56.053056977999596 -5.5159111504805916,-56.00892491028779 -5.515874390220655,-56.00892501130261 -5.515807411549814,-55.10621586418906 -5.513685455771881,-55.108821882251775 -4.6092845892233,-54.20840287327946 -4.606372862374043,-54.21169990975238 -3.658594390979672,-54.214267703869346 -2.710949551849636,-55.15704255065496 -2.7127451087194463,-56.0563616875051 -2.71378646425769,-56.9561852630143 -2.7141556791285275,-57.8999998009875 -2.713837142510183,-57.90079161941062 -3.6180222056692726,-58.800616247288836 -3.616721351843382,-58.80274769505742 -4.565257232533263))',
            'url': "https://scihub.copernicus.eu/apihub/odata/v1/Products('44517f66-9845-4792-a988-b5ae6e81fd3e')/$value",
            'Cloud cover percentage': 18.153846153846153,
            'Date': datetime(2015, 12, 27, 14, 22, 29),
            'Degraded MSI data percentage': 0,
            'Degraded ancillary data percentage': 0,
            'Filename': 'S2A_OPER_PRD_MSIL1C_PDMC_20151228T112523_R110_V20151227T142229_20151227T142229.SAFE',
            'Footprint': '<gml:Polygon srsName="http://www.opengis.net/gml/srs/epsg.xml#4326" xmlns:gml="http://www.opengis.net/gml"> <gml:outerBoundaryIs> <gml:LinearRing> <gml:coordinates>-4.565257232533263,-58.80274769505742 -5.513960396525286,-58.80535376268811 -5.515947033626909,-57.90315169909761 -5.516014389089381,-57.903151791669515 -5.516044812342758,-57.85874693129081 -5.516142631941845,-57.814323596961835 -5.516075248310466,-57.81432351345917 -5.516633044843839,-57.00018056571297 -5.516700066819259,-57.000180565731384 -5.51666329264377,-56.95603179187787 -5.516693539799448,-56.91188395837315 -5.51662651925904,-56.91188396736038 -5.515947927683427,-56.097209386295305 -5.516014937246069,-56.09720929423562 -5.5159111504805916,-56.053056977999596 -5.515874390220655,-56.00892491028779 -5.515807411549814,-56.00892501130261 -5.513685455771881,-55.10621586418906 -4.6092845892233,-55.108821882251775 -4.606372862374043,-54.20840287327946 -3.658594390979672,-54.21169990975238 -2.710949551849636,-54.214267703869346 -2.7127451087194463,-55.15704255065496 -2.71378646425769,-56.0563616875051 -2.7141556791285275,-56.9561852630143 -2.713837142510183,-57.8999998009875 -3.6180222056692726,-57.90079161941062 -3.616721351843382,-58.800616247288836 -4.565257232533263,-58.80274769505742</gml:coordinates> </gml:LinearRing> </gml:outerBoundaryIs></gml:Polygon>',
            'Format': 'SAFE',
            'Format correctness': 'PASSED',
            'General quality': 'PASSED',
            'Generation time': datetime(2015, 12, 28, 11, 25, 23, 357),
            'Geometric quality': 'PASSED',
            'Identifier': 'S2A_OPER_PRD_MSIL1C_PDMC_20151228T112523_R110_V20151227T142229_20151227T142229',
            'Ingestion Date': datetime(2015, 12, 28, 10, 57, 13, 725000),
            'Instrument': 'MSI',
            'Instrument abbreviation': 'MSI',
            'Instrument mode': 'INS-NOBS',
            'Instrument name': 'Multi-Spectral Instrument',
            'JTS footprint': 'POLYGON ((-58.80274769505742 -4.565257232533263,-58.80535376268811 -5.513960396525286,-57.90315169909761 -5.515947033626909,-57.903151791669515 -5.516014389089381,-57.85874693129081 -5.516044812342758,-57.814323596961835 -5.516142631941845,-57.81432351345917 -5.516075248310466,-57.00018056571297 -5.516633044843839,-57.000180565731384 -5.516700066819259,-56.95603179187787 -5.51666329264377,-56.91188395837315 -5.516693539799448,-56.91188396736038 -5.51662651925904,-56.097209386295305 -5.515947927683427,-56.09720929423562 -5.516014937246069,-56.053056977999596 -5.5159111504805916,-56.00892491028779 -5.515874390220655,-56.00892501130261 -5.515807411549814,-55.10621586418906 -5.513685455771881,-55.108821882251775 -4.6092845892233,-54.20840287327946 -4.606372862374043,-54.21169990975238 -3.658594390979672,-54.214267703869346 -2.710949551849636,-55.15704255065496 -2.7127451087194463,-56.0563616875051 -2.71378646425769,-56.9561852630143 -2.7141556791285275,-57.8999998009875 -2.713837142510183,-57.90079161941062 -3.6180222056692726,-58.800616247288836 -3.616721351843382,-58.80274769505742 -4.565257232533263))',
            'Mission datatake id': 'GS2A_20151227T140932_002681_N02.01',
            'NSSDC identifier': '2015-000A',
            'Orbit number (start)': 2681,
            'Pass direction': 'DESCENDING',
            'Platform serial identifier': 'Sentinel-2A',
            'Processing baseline': 2.01,
            'Processing level': 'Level-1C',
            'Product type': 'S2MSI1C',
            'Radiometric quality': 'PASSED',
            'Relative orbit (start)': 110,
            'Satellite': 'Sentinel-2',
            'Satellite name': 'Sentinel-2',
            'Satellite number': 'A',
            'Sensing start': datetime(2015, 12, 27, 14, 22, 29),
            'Sensing stop': datetime(2015, 12, 27, 14, 22, 29),
            'Sensor quality': 'PASSED',
            'Size': '5.50 GB'
        }
    }

    for id, expected in expected_full.items():
        ret = api.get_product_odata(id, full=True)
        assert set(ret) == set(expected)
        for k in ret:
            assert ret[k] == expected[k]
from datetime import date
import json
import re

from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt
from lib.python import es_logging as log

logger = log.my_logger(__name__)

# connect to the API
api = SentinelAPI('vijaycharan.v', 'creationvv1!', 'https://scihub.copernicus.eu/dhus')

geojson_roi = '/srv/www/eStation2/apps/tools/ex_geojson.geojson'
datetime_start = '20180119'
datetime_end = date(2018, 1, 20)
platformname = 'Sentinel-1'

# download single scene by known product id
# api.download(<product_id>)

# download all results from the search
# api.download_all(products)

# GeoJSON FeatureCollection containing footprints and metadata of the scenes
# api.to_geojson(products)

# GeoPandas GeoDataFrame with the metadata of the scenes and the footprints as geometries
# api.to_geodataframe(products)

# Get basic information about the product: its title, file size, MD5 sum, date, footprint and
# its download url
# api.get_product_odata(<product_id>)
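# A minimal sketch of the query step that the commented calls above assume;
# the `footprint` and `products` names are illustrative, not from the original excerpt.
footprint = geojson_to_wkt(read_geojson(geojson_roi))
products = api.query(footprint,
                     date=(datetime_start, datetime_end),
                     platformname=platformname)
logger.info("Found %s products", len(products))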
def satdownload(product_id, geojson, download_path='./downloads/',
                remove_trash=False, api=None, download_only=False):
    """
    Downloads, extracts and crops products.

    Args:
        product_id: str
            Example: "e3fea737-a83b-4fec-8a5a-68ed8d647c71"
        geojson: str
            Path to geojson file.
        download_path: str, optional
            Location to download products.
        remove_trash: bool, default False
            Remove unnecessary files after downloading.
        download_only: bool, default False
            Download only (do not extract).
        api: SentinelAPI
            api object
    """
    print('Satdownload for ' + product_id)
    logging.debug('satdownload: ' + product_id)

    # create downloads folder
    if os.path.isdir(download_path) is False:
        os.mkdir(download_path)

    if api is None:
        api = SentinelAPI(USERNAME, PASSWORD, 'https://scihub.copernicus.eu/dhus')

    # query product information
    product_info = api.get_product_odata(product_id, full=True)
    sentinel = product_info['Satellite']

    # directory for images only
    target_directory = os.path.join(download_path, product_info['title'])
    if os.path.isdir(target_directory):
        print('Product is already processed, skipping product...')
        return

    # download
    if os.path.isfile(os.path.join(download_path, product_info['title'] + '.zip')) is True:
        print(product_info['title'] + '.zip' + ' exist.')
    else:
        satdownload_zip(product_info['id'], download_path, api=api)

    # skip extraction part
    if download_only is True:
        return

    # extract zip file
    zipfile_path = os.path.join(download_path, product_info['title'] + '.zip')
    zip_ref = zipfile.ZipFile(zipfile_path, 'r')
    zip_ref.extractall(download_path)
    zip_ref.close()

    if os.path.isdir(os.path.join(download_path, product_info['Filename'])) is False:
        raise Exception('Directory not found after unzipping.')

    # clearing target directory
    if os.path.isdir(target_directory) is True:
        shutil.rmtree(target_directory)
    os.mkdir(target_directory)

    selection = transform_coordinates(coordinates_from_geojson(geojson))

    if sentinel == 'Sentinel-2':
        # product can contain many tiles (located in ./GRANULE/)
        granule = os.path.join(download_path, product_info['Filename'], 'GRANULE')
        for i, tail_name in enumerate(os.listdir(granule)):
            print('\ttail name: ' + tail_name)
            tail_folder_name = 'tail.{}'.format(i)
            os.mkdir(os.path.join(target_directory, tail_folder_name))

            # image directories are different for different product types
            image_dir = os.path.join(granule, tail_name, 'IMG_DATA')
            if product_info['Product type'] == 'S2MSI2Ap':
                image_dir = os.path.join(image_dir, 'R10m')

            # move bands into target directory
            for image in os.listdir(image_dir):
                image_prime = image
                if product_info['Product type'] == 'S2MSI2Ap':
                    image_prime = image_prime[4:-8] + '.jp2'
                os.rename(os.path.join(image_dir, image),
                          os.path.join(target_directory, tail_folder_name, image_prime))

    elif sentinel == 'Sentinel-1':
        # shift selection for sentinel-1 products
        dx, dy = 130.54544882194287, 20.162166196209284
        selection[:, 0] = selection[:, 0] + dx
        selection[:, 1] = selection[:, 1] - dy

        # create tail folder
        tail_folder_name = 'tail.{}'.format(0)
        os.mkdir(os.path.join(target_directory, tail_folder_name))

        # image directories are different for different product types
        image_dir = os.path.join(download_path, product_info['Filename'], 'measurement')

        # move bands into target directory
        for image in os.listdir(image_dir):
            image_path = os.path.join(image_dir, image)
            gdal.Warp(image_path, gdal.Open(image_path), dstSRS='EPSG:32638')
            os.rename(image_path,
                      os.path.join(target_directory, tail_folder_name, image))
    else:
        print('Unknown satellite')

    # save info file
    product_info_series = pandas.Series(product_info)
    with open(os.path.join(target_directory, 'info.txt'), 'w') as f:
        f.write(product_info_series.to_string())
    with open(os.path.join(target_directory, 'info.json'), 'w') as f:
        product_info_series.to_json(f)

    # remove unnecessary files
    if remove_trash is True:
        os.remove(zipfile_path)
        shutil.rmtree(os.path.join(download_path, product_info['Filename']))

    # cropping images
    print(target_directory)
    for tail_name in os.listdir(target_directory):
        if os.path.isdir(os.path.join(target_directory, tail_name)) is False:
            continue
        print('\tprocessing ' + tail_name + ' ...')
        process_tail(os.path.join(target_directory, tail_name), selection,
                     remove_trash=remove_trash)
    print('\n\n')
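# Hypothetical usage sketch; USERNAME/PASSWORD are placeholders, and the helper
# functions (satdownload_zip, transform_coordinates, process_tail, ...) are
# assumed to be defined elsewhere in this module.
api = SentinelAPI(USERNAME, PASSWORD, 'https://scihub.copernicus.eu/dhus')
satdownload('e3fea737-a83b-4fec-8a5a-68ed8d647c71', 'roi.geojson',
            download_path='./downloads/', remove_trash=True, api=api)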
import os
import glob
import multiprocessing
from datetime import datetime, timedelta

from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt

end = datetime(2016, 12, 31)
it = end + timedelta(days=1)
os.chdir('D:\\AA-remotesensing-artificial-structures\\sensing_data\\raw\\timeseries\\lisboa-setubal\\s2')

while it.date() != start.date():
    it -= timedelta(days=1)
    completedir = glob.glob('*' + it.date().strftime("%Y%m%d") + '*')
    completes = [x for x in completedir
                 if x not in glob.glob('*' + it.date().strftime("%Y%m%d") + '*.incomplete')]
    if len(completes) > 0:
        print("Day: " + str(it.date()) + " already fetched previously. Skipping.")
        continue

    successful = False
    while not successful:
        for tries in range(0, 5, 1):
            try:
                print("Day: " + str(it.date()))
                api = SentinelAPI('amneves', 'Amnandre12')
                footprint = geojson_to_wkt(read_geojson('geo.geojson'))
                products = api.query(footprint,
                                     date=(it.date().strftime("%Y%m%d"),
                                           (it + timedelta(days=1)).date().strftime("%Y%m%d")),
                                     platformname='Sentinel-2',
                                     producttype='S2MSI1C',
                                     area_relation='Contains',
                                     cloudcoverpercentage=(0, 30))
                dataframe = api.to_dataframe(products)
                count = dataframe.shape[0]
                print(str(count) + " product(s) on this day.")
                # api.download_all(products)
                # download(api, products)
                if count == 1:
                    nome = dataframe.get_values()[0][0]
                    p = multiprocessing.Process(target=foo, name="Foo", args=(api, products))
import os

from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt

file_name = None
product_type = None
platform_name = None
orbit_direction = None
polarisation_mode = None
cloud_cover_percentage = None
sensor_operational_mode = None

# post-search modes
printProducts = True
writeToDB = False
downloadProducts = False
getGeoJSON = False

# connect to the API
api = SentinelAPI(username, password, url)

# read geojson
geojson = os.path.join(geojson_dir, '%s.geojson' % areacode)
footprint = geojson_to_wkt(read_geojson(geojson))

raw_query = ''
if file_name is not None:
    raw_query = raw_query + 'filename:%s AND ' % file_name
if product_type is not None:
    raw_query = raw_query + 'producttype:%s AND ' % product_type
if platform_name is not None:
    raw_query = raw_query + 'platformname:%s AND ' % platform_name
if orbit_direction is not None:
    raw_query = raw_query + 'orbitdirection:%s AND ' % orbit_direction
if polarisation_mode is not None:
    raw_query = raw_query + 'polarisationmode:%s AND ' % polarisation_mode
def __attrs_post_init__(self):
    self.month_range = relativedelta(months=1)
    self.api = SentinelAPI(self.user.name, self.user.password,
                           'https://scihub.copernicus.eu/dhus')
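# This hook only runs inside an attrs-decorated class; a minimal sketch of a
# host class it could belong to. The User type and field names are assumptions,
# not taken from the original code.
import attr
from dateutil.relativedelta import relativedelta
from sentinelsat import SentinelAPI

@attr.s
class User:
    name = attr.ib()
    password = attr.ib()

@attr.s
class HubClient:
    user = attr.ib()

    def __attrs_post_init__(self):
        # attrs calls this after the generated __init__ finishes
        self.month_range = relativedelta(months=1)
        self.api = SentinelAPI(self.user.name, self.user.password,
                               'https://scihub.copernicus.eu/dhus')

client = HubClient(User("******", "******"))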
# -*- coding: utf-8 -*-
"""
Sentinel-5P data pull

@author: Kalkberg
"""

from sentinelsat import SentinelAPI  # install via pip
import os
import glob
from shapely import wkt

# Set up API
api = SentinelAPI(user='******', password='******',
                  api_url='https://s5phub.copernicus.eu/dhus')

# Define area of interest in WKT format
# Go to https://arthur-e.github.io/Wicket/sandbox-gmaps3.html and draw one out
AOI = ''

# Dates
startdate = '20200401'
enddate = '20200430'
frequency = 1  # every nth day in date range will be downloaded

# Download list of Sentinel-5P NO2 products in region of interest
products = api.query(AOI,
                     date=(startdate, enddate),
                     platformname='Sentinel-5',
                     producttype='L2__NO2___',  # useful data types: 'L2__SO2___' and 'L2__NO2___'
                     processingmode='Offline',  # 'Near real time' or 'Offline'
                     )
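# The excerpt defines `frequency` but never uses it; a sketch of one way it
# could be applied (this filtering logic is an assumption, not from the script).
products_df = api.to_dataframe(products)
products_df = products_df.sort_values('beginposition')
# keep every nth acquisition day, then download the matching products
keep_days = products_df['beginposition'].dt.date.unique()[::frequency]
subset = products_df[products_df['beginposition'].dt.date.isin(keep_days)]
api.download_all(subset.index)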
        (59.51138530046753, 24.825137916849023),
        (59.459087606762346, 24.907535377786523),
        (59.4147455486766, 24.929508034036523),
        (59.39832075950073, 24.844363991067773),
        (59.37664183245853, 24.814151588724023),
        (59.35249898189222, 24.75304013852871),
        (59.32798867805195, 24.573825660989648)]
    # Copernicus Hub likes coordinates in lng,lat format
    return Polygon([(y, x) for x, y in tln_points])


username = "******"
password = "******"
hub = SentinelAPI(username, password, "https://scihub.copernicus.eu/dhus")

data_products = hub.query(
    get_tallinn_polygon(),                # which area interests you
    date=("20200101", "20200420"),
    cloudcoverpercentage=(0, 10),         # we don't want clouds
    platformname="Sentinel-2",
    processinglevel="Level-2A"            # more processed, ready to use data
)
data_products = hub.to_geodataframe(data_products)

# we want to avoid downloading overlapping images, so selecting by this keyword
data_products = data_products[data_products["title"].str.contains("T35VLF")]
print(data_products.shape)
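# Plausible next step, not in the excerpt: download the least cloudy remaining
# product. Column names follow sentinelsat's to_geodataframe output.
best = data_products.sort_values("cloudcoverpercentage").iloc[0]
hub.download(best["uuid"], directory_path="./downloads")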
import os
from glob import glob
from datetime import timedelta

from sentinelsat import SentinelAPI, geojson_to_wkt

select_date2 = select_date - step
print("Step 1: Download Sentinel SAR Product in " + config.name_of_area +
      " area with select Dates between " + format_date(select_date2) +
      " and " + format_date(select_date))
logger.info("Download Sentinel SAR Product in " + config.name_of_area +
            " area with select Dates between " + format_date(select_date2) +
            " and " + format_date(select_date))

end_date = format_date(select_date + timedelta(days=1))
start_date = format_date(select_date2)

url = config.url
username = config.username  # ask ITC for the username and password
password = config.password

# Get info product
api = SentinelAPI(username, password)  # fill with SMARTSeeds user and password
footprint = geojson_to_wkt(input_geojson)
products = api.query(footprint,
                     producttype=type_sar,
                     orbitdirection=orbit,
                     date="[{0} TO {1}]".format(start_date, end_date))

dirpath = cwd + config.sentineldirpath
if not os.path.exists(dirpath):
    os.makedirs(dirpath)

api.download_all(products, directory_path=dirpath, checksum=True)
zipfiles = glob("{}*.zip".format(dirpath))
polygons = []
def test_check_existing(tmpdir):
    api = SentinelAPI(**_api_auth)
    ids = [
        "5618ce1b-923b-4df2-81d9-50b53e5aded9",
        "d8340134-878f-4891-ba4f-4df54f1e3ab4",
        "1f62a176-c980-41dc-b3a1-c735d660c910"
    ]
    names = ["S1A_WV_OCN__2SSV_20150526T081641_20150526T082418_006090_007E3E_104C",
             "S1A_WV_OCN__2SSV_20150526T211029_20150526T211737_006097_007E78_134A",
             "S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E"]
    paths = [tmpdir.join(fn + ".zip") for fn in names]
    path_strings = list(map(str, paths))

    # Init files used for testing
    api.download(ids[0], str(tmpdir))
    # File #1: complete and correct
    assert paths[0].check(exists=1, file=1)
    # File #2: complete but incorrect
    with paths[1].open("wb") as f:
        size = 130102
        f.seek(size - 1)
        f.write(b'\0')
    # File #3: incomplete
    dummy_content = b'aaaaaaaaaaaaaaaaaaaaaaaaa'
    with paths[2].open("wb") as f:
        f.write(dummy_content)
    assert paths[2].check(exists=1, file=1)

    # Test
    expected = {str(paths[1]), str(paths[2])}

    result = api.check_files(ids=ids, directory=str(tmpdir))
    assert set(result) == expected
    assert result[paths[1]][0]['id'] == ids[1]
    assert result[paths[2]][0]['id'] == ids[2]
    assert paths[0].check(exists=1, file=1)
    assert paths[1].check(exists=1, file=1)
    assert paths[2].check(exists=1, file=1)

    result = api.check_files(paths=path_strings)
    assert set(result) == expected
    assert result[paths[1]][0]['id'] == ids[1]
    assert result[paths[2]][0]['id'] == ids[2]
    assert paths[0].check(exists=1, file=1)
    assert paths[1].check(exists=1, file=1)
    assert paths[2].check(exists=1, file=1)

    result = api.check_files(paths=path_strings, delete=True)
    assert set(result) == expected
    assert result[paths[1]][0]['id'] == ids[1]
    assert result[paths[2]][0]['id'] == ids[2]
    assert paths[0].check(exists=1, file=1)
    assert not paths[1].check(exists=1, file=1)
    assert not paths[2].check(exists=1, file=1)

    missing_file = str(tmpdir.join(
        "S1A_EW_GRDH_1SDH_20141003T003840_20141003T003920_002658_002F54_4DD1.zip"))
    result = api.check_files(paths=[missing_file])
    assert set(result) == {missing_file}
    assert result[missing_file][0]['id']

    with pytest.raises(ValueError):
        api.check_files(ids=ids)

    with pytest.raises(ValueError):
        api.check_files()

    tmpdir.remove()
def test_download_invalid_id():
    api = SentinelAPI(**_api_auth)
    uuid = "1f62a176-c980-41dc-xxxx-c735d660c910"
    with pytest.raises(SentinelAPIError) as excinfo:
        api.download(uuid)
    assert 'Invalid key' in excinfo.value.msg
def test_download(tmpdir):
    api = SentinelAPI(**_api_auth)
    uuid = "1f62a176-c980-41dc-b3a1-c735d660c910"
    filename = "S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E"
    expected_path = tmpdir.join(filename + ".zip")
    tempfile_path = tmpdir.join(filename + ".zip.incomplete")

    # Download normally
    product_info = api.download(uuid, str(tmpdir), checksum=True)
    assert expected_path.samefile(product_info["path"])
    assert not tempfile_path.check(exists=1)
    assert product_info["title"] == filename
    assert product_info["size"] == expected_path.size()
    assert product_info["downloaded_bytes"] == expected_path.size()

    hash = expected_path.computehash("md5")
    modification_time = expected_path.mtime()
    expected_product_info = product_info

    # File exists, expect nothing to happen
    product_info = api.download(uuid, str(tmpdir))
    assert not tempfile_path.check(exists=1)
    assert expected_path.mtime() == modification_time
    expected_product_info["downloaded_bytes"] = 0
    assert product_info == expected_product_info

    # Create invalid but full-sized tempfile, expect re-download
    expected_path.move(tempfile_path)
    with tempfile_path.open("wb") as f:
        f.seek(expected_product_info["size"] - 1)
        f.write(b'\0')
    assert tempfile_path.computehash("md5") != hash
    product_info = api.download(uuid, str(tmpdir))
    assert expected_path.check(exists=1, file=1)
    assert expected_path.computehash("md5") == hash
    expected_product_info["downloaded_bytes"] = expected_product_info["size"]
    assert product_info == expected_product_info

    # Create invalid tempfile, without checksum check
    # Expect continued download and no exception
    dummy_content = b'aaaaaaaaaaaaaaaaaaaaaaaaa'
    with tempfile_path.open("wb") as f:
        f.write(dummy_content)
    expected_path.remove()
    product_info = api.download(uuid, str(tmpdir), checksum=False)
    assert not tempfile_path.check(exists=1)
    assert expected_path.check(exists=1, file=1)
    assert expected_path.computehash("md5") != hash
    expected_product_info["downloaded_bytes"] = expected_product_info["size"] - len(dummy_content)
    assert product_info == expected_product_info

    # Create invalid tempfile, with checksum check
    # Expect continued download and exception raised
    dummy_content = b'aaaaaaaaaaaaaaaaaaaaaaaaa'
    with tempfile_path.open("wb") as f:
        f.write(dummy_content)
    expected_path.remove()
    with pytest.raises(InvalidChecksumError):
        api.download(uuid, str(tmpdir), checksum=True)
    assert not tempfile_path.check(exists=1)
    assert not expected_path.check(exists=1, file=1)

    tmpdir.remove()
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt

user = '******'
password = '******'
url = 'https://s5phub.copernicus.eu/dhus'
search_polygon = './data/landkreis_osnabrueck.geojson'
start_date = '20190910'
end_date = '20190911'

# query api for available products
api = SentinelAPI(user, password, url)
footprint = geojson_to_wkt(read_geojson(search_polygon))
products = api.query(area=footprint, date=(start_date, end_date))

# convert to pandas data frame
products_df = api.to_dataframe(products)

# inspect data
products_df.head()     # view top of df
products_df.columns    # show column names

# filter only one product of CO
where = products_df.producttypedescription == 'Carbon Monoxide'
one_id = products_df.uuid[where].iloc[0]

# download one product
api.download(one_id)
import os
import pandas as pd
import zipfile
import datetime
from sentinelsat import SentinelAPI
# from geopandas import GeoSeries
# import geopandas as gpd
# from shapely.geometry import Polygon

user = '******'
password = '******'
api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus')


class Sentinel_downloader:

    @staticmethod
    def image_down(footprint, Date_Ini, Date_Fin, analysis_area, zipped_folder,
                   unzipped_folder, lotes_uni, user_analysis, municipio, departamento):
        # date formatting
        Date_Ini_c = Date_Ini.replace('-', '')
        Date_Fin_c = Date_Fin.replace('-', '')

        # list available images
        products = api.query(footprint,
                             date=(Date_Ini_c, Date_Fin_c),
                             platformname='Sentinel-2',
                             processinglevel='Level-1C')

        # satellite images containing the area of interest
        products_gdf = api.to_geodataframe(products)

        # sorted by date
        products_gdf_sorted = products_gdf.sort_values(['beginposition'], ascending=[True])
            dictionar = intersecting_features[i]
        else:
            key = list(intersecting_features[i].keys())[0]
            if key in list(dictionar.keys()):
                dictionar[key] = dictionar[key] + intersecting_features[i][key]
            else:
                dictionar[key] = intersecting_features[i][key]
    for key in dictionar.keys():
        if dictionar[key] / np.sum(list(dictionar.values())) >= percentage:
            return key
    else:
        # no class reached the threshold
        return '0'

# %%
# instantiating SentinelAPI connection for imagery download
api = SentinelAPI('mitja', 'Copernicus12!', 'https://scihub.copernicus.eu/dhus')

# %%
wkt_point = 'POINT (23.6383667 40.5790194)'
time_period = (datetime.date(2020, 3, 1), datetime.date(2020, 5, 1))
cloudcover_range = (0, 1)

# %%
products = api.query(wkt_point,
                     date=time_period,
                     platformname='Sentinel-2',
                     cloudcoverpercentage=cloudcover_range)

# %%
min_coverage = 1
for p in products:
    if 'tileid' in list(products[p].keys()):
        if products[p]['cloudcoverpercentage'] < min_coverage:
            min_coverage = products[p]['cloudcoverpercentage']
import os
from datetime import datetime

import pandas as pd
from sentinelsat import SentinelAPI

pd.set_option('display.width', None)
pd.set_option('display.max_colwidth', -1)

### Basic parameters, working folders, etc.
working_folder = r"S:\users\...\godthaab_iceberg_detection"
platformname = "sentinel_1"
imagedir = os.path.join(working_folder, "images", platformname, "downloaded")
zipfile_dir = os.path.join(imagedir, "zip")
scriptpath = os.path.dirname(os.path.realpath(__file__))
today = datetime.now().strftime("%Y%m%d")

### List containing orbit number and slice of the relevant scenes
orbitnumber_slice = [[54, 1], [127, 5], [25, 5]]

### Create api connection to sentinelhub using sentinelsat
api = SentinelAPI('username', 'password', 'https://scihub.copernicus.eu/dhus')

### Search by polygon, time, and SciHub query keywords
products_df = pd.DataFrame()
for scene in orbitnumber_slice:
    print(scene)
    products = api.query(date=('NOW-7DAYS', 'NOW'),
                         platformname='Sentinel-1',
                         sensoroperationalmode='IW',
                         producttype='GRD',
                         polarisationmode='HH HV',
                         relativeorbitnumber=scene[0],
                         slicenumber=scene[1])
    ### Convert to Pandas DataFrame
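    # Plausible continuation of the truncated loop body (an assumption, not
    # from the original): append this scene's results to the accumulated frame.
    products_df = pd.concat([products_df, api.to_dataframe(products)])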
import datetime
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt
import psycopg2

api = SentinelAPI('MRDEVISH', 'Research1')
today_date = datetime.date.today()
footprint = geojson_to_wkt(read_geojson('search_polygon_10.geojson'))
platformname = 'Sentinel-3'
producttype = 'SL_2_LST___'
products = api.query(footprint,
                     platformname=platformname,
                     producttype=producttype,
                     date=(today_date - datetime.timedelta(days=1), today_date))
products_pandas = api.to_geodataframe(products)
Names = products_pandas['Product Name']

connection1 = psycopg2.connect(user="******",
                               password="******",
                               host="127.0.0.1",
                               port="5432",
                               database="db_products")
cursor1 = connection1.cursor()
query0 = """select "Product Name" from "Products"; """
cursor1.execute(query0)
records = cursor1.fetchone()

num = 1
undownloaded = []
for i in Names:
    if i not in records:
        cursor1 = connection1.cursor()
        id_ = len(records) + num
        query = """INSERT INTO "Products" Values({},{},{},0);""".format(id_, i, today_date)
        cursor1.execute(query)
        num = num + 1

import pika
This function queries the S2 L2A archive via scihub and returns a list of tiles,
with metadata, for a region and time period of interest, with an option to filter
by cloud cover. It uses the sentinelsat package, and a ROI is specified using a
geoJSON file.

Note that to use fmask to isolate cloud cover more accurately than the default
layer, it is necessary to download the corresponding L1C tile. It is recommended
that ultimately the AWS route is taken for downloading the datasets.
"""
import datetime

from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt

username = '******'
pswd = '<password>'
download_api = SentinelAPI(username, pswd, api_url='https://scihub.copernicus.eu/dhus/')

download_dir = '/disk/scratch/local.2/dmilodow/Sentinel2/L2A/'
sites = ['Ardfern1', 'Ardfern2', 'Arisaig', 'Auchteraw', 'GlenLoy', 'Mandally', 'Achdalieu']
start_date = datetime.datetime.strptime('2019-01-01', '%Y-%m-%d').date()  # start date for time period of interest
end_date = datetime.datetime.strptime('2020-01-01', '%Y-%m-%d').date()    # end date for time period of interest
max_cloud_cover = 67  # percent

s1list = []
import os
import json
import argparse
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt
from datetime import date

api = SentinelAPI('artuntun', 'h48n4zqwe!', 'https://scihub.copernicus.eu/dhus')

# pass the input-file and date
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument("-i", "--input", help="Input coordinates",
                    default='./california.json')
parser.add_argument("-ds", "--date-start", help="dates to search thru",
                    default='20190719')
parser.add_argument("-de", "--date-end", help="dates to search thru",
                    default='20190729')
args = parser.parse_args()

# download single scene by known product id
# api.download('c23ccf2b-a133-48b4-8389-1778972893dc')

# search by polygon, time, and SciHub query keywords
footprint = geojson_to_wkt(read_geojson(args.input))
products = api.query(footprint,
def search_sentinels(platform_name, df, aoi, dt=2, user=None, pwd=None,
                     proj_string='+init=EPSG:3995', product_type=None,
                     min_cloud_cover=0, max_cloud_cover=100, swath_type=None,
                     f_out=None):
    """
    Search Sentinel-1/2 images overlapping ICESat-2 data within +- dt

    Parameters:
    -----------
    platform_name : str ['Sentinel-1' | 'Sentinel-2']
        name of the platform for which images will be searched
    df : pandas DataFrame
        ICESat-2 data
    aoi : str, list
        area of interest as WKT string or bounding box [lllon, lllat, urlon, urlat]
    dt : int, float
        difference in hours between IS2 and the images
    user : str
        username to connect to the Copernicus Scientific Hub
    pwd : str
        password to connect to the Copernicus Scientific Hub
    proj_string : str
        projection string to be used with the pyproj module
    product_type : str
        name of the type of product to be searched
        (more info at https://scihub.copernicus.eu/userguide/)
    swath_type : str
        name of the type of swath to be searched (Sentinel-1 only,
        more info at https://scihub.copernicus.eu/userguide/)
    min_cloud_cover : int, float
        minimum cloud coverage in percentage (Sentinel-2 only)
    max_cloud_cover : int, float
        maximum cloud coverage in percentage (Sentinel-2 only)
    f_out : str
        path to file where to write results

    Returns: (to be finished!)
    --------
    """
    # =========================================================================
    # Pre-processing
    # =========================================================================
    ### Imports
    import sys
    from sentinelsat import SentinelAPI
    import pyproj
    import numpy as np
    import shapely.geometry as sg
    from shapely.wkt import dumps, loads
    from astropy.time import Time, TimeDelta
    from tqdm import tqdm

    ### Convert aoi to shapely polygon in projected CRS
    # define projection
    print("Creating AOI polygon...")
    proj = pyproj.Proj(proj_string)

    # read aoi polygon
    if type(aoi) == str:
        aoi_temp = loads(aoi)
    elif type(aoi) in (list, tuple):
        aoi_temp = sg.box(aoi[0], aoi[1], aoi[2], aoi[3])
        aoi = aoi_temp.wkt
    else:
        print("ERROR: 'aoi' should be provided as a WKT string or bounding box (list)")
        sys.exit(1)

    ### Check input parameters
    if product_type is None:
        if platform_name == 'Sentinel-1':
            product_type = 'GRD'
            print("product_type set to: ", product_type)
        if platform_name == 'Sentinel-2':
            product_type = 'S2MSI1C'
            print("product_type set to: ", product_type)
    if swath_type is None and platform_name == 'Sentinel-1':
        swath_type = 'EW'
        print("swath_type set to: ", swath_type)

    # project coordinates and convert to shapely polygon
    x, y = proj(aoi_temp.exterior.xy[0], aoi_temp.exterior.xy[1])
    aoi_poly = sg.Polygon(list(zip(x, y)))

    ### Convert dt to astropy time object
    dtt = TimeDelta(3600 * dt, format='sec')

    # =========================================================================
    # Processing
    # =========================================================================
    ### Project IS2 data to desired CRS
    print("Selecting orbit data inside AOI...")
    lon, lat = np.array(df['lons']), np.array(df['lats'])
    x, y = proj(lon, lat)

    ### Extract IS2 orbit number
    is2_orbits = np.unique(df['orbit_number'])
    print("N. of orbits/points inside AOI: {}/{}".format(len(is2_orbits), len(df)))

    ### Extract time period from IS2 data to query the server
    t_is2 = Time(df['time'], scale='utc')
    t_is2_start = min(t_is2) - dtt
    t_is2_stop = max(t_is2) + dtt

    ### Read metadata
    print("Query for metadata...")
    api = SentinelAPI(user, pwd, 'https://scihub.copernicus.eu/dhus', timeout=600)
    if platform_name == 'Sentinel-1':
        md = api.query(area=aoi,
                       date=(t_is2_start.datetime, t_is2_stop.datetime),
                       platformname='Sentinel-1',
                       area_relation='Intersects',
                       producttype=product_type,
                       sensoroperationalmode=swath_type)
    elif platform_name == 'Sentinel-2':
        md = api.query(area=aoi,
                       date=(t_is2_start.datetime, t_is2_stop.datetime),
                       platformname='Sentinel-2',
                       area_relation='Intersects',
                       cloudcoverpercentage=(min_cloud_cover, max_cloud_cover),
                       producttype=product_type)
    print("N. of total images: {}".format(len(md)))
    if len(md) == 0:
        return [], [], [], [], [], []

    ### Convert Sentinel time strings to astropy time objects
    t_sen = {}
    print("Converting time to astropy objects...")
    for el in md:
        t_sen[el] = Time(md[el]['beginposition'], format='datetime', scale='utc')

    ### Loop over orbits to find images that satisfy time constraints
    TimeDict = {}
    t_is2 = []
    print("Looping over orbits to find intersections within {}h...".format(dt))
    for c, o in tqdm(enumerate(is2_orbits)):
        ### select IS2 data
        d_is2 = df[df['orbit_number'] == o]
        ### compute IS2 track central time
        t_temp = Time(d_is2['time'], scale='utc')
        t_start_is2 = min(t_temp)
        t_stop_is2 = max(t_temp)
        t_is2_o = t_start_is2 + (t_stop_is2 - t_start_is2) / 2
        t_is2.append(t_is2_o)
        ### save dict keys of images within +-dt from the IS2 track
        i_t = np.array(
            [el for el in md if np.abs((t_sen[el] - t_is2_o).sec) <= dtt.sec])
        TimeDict[o] = i_t

    # get unique images within +-dt from all orbit data
    i_sen_t_int = set(np.concatenate(list(TimeDict.values())).ravel())
    print("N. of images within {}h: {}".format(dt, len(i_sen_t_int)))
    if len(i_sen_t_int) == 0:
        return [], [], [], [], [], []

    ### Project images corner coordinates and convert to shapely polygons
    print("Creating images footprint polygons...")
    # loop over them, project corner coords and create polygons
    SenPolygonsDict = {}
    for i in i_sen_t_int:
        # load footprint
        aoi_sen = loads(md[i]['footprint'])
        # check if multipolygon has more than 1 polygon defined
        if len(aoi_sen) > 1:
            print("WARNING: footprint for product {}".format(i),
                  "is defined by more than 1 polygon!!!")
        aoi_sen = aoi_sen[0]
        # project corner coords
        x_sen, y_sen = proj(aoi_sen.exterior.xy[0], aoi_sen.exterior.xy[1])
        # add polygon to dictionary
        SenPolygonsDict[i] = sg.Polygon(list(zip(x_sen, y_sen)))

    ### Loop over orbits to find spatial intersections
    print("Looping over orbits to find intersections...")
    orbit_number = []
    product_name = []
    browse_url = []
    download_url = []
    t_diff = []
    md_out = {}
    for c, o in tqdm(enumerate(is2_orbits)):
        ### select IS2 data
        i = df['orbit_number'] == o
        # check if track has at least 2 points
        if sum(i) < 2:
            continue
        d_is2 = df[i]
        x_is2 = x[i]
        y_is2 = y[i]
        ### create shapely line from the IS2 track
        is2_line = sg.LineString(list(zip(x_is2, y_is2)))
        ### collect image polygon indices
        i_sen = TimeDict[o]
        ### Loop over image polygons
        for i_poly in i_sen:
            ls_poly = SenPolygonsDict[i_poly]
            if is2_line.intersects(ls_poly):
                orbit_number.append(o)
                t_diff.append((t_sen[i_poly] - t_is2[c]).sec / 3600)
                product_name.append(md[i_poly]['filename'])
                download_url.append(md[i_poly]['link'])
                browse_url.append(md[i_poly]['link_icon'])
                md_out[i_poly] = md[i_poly]
    print("N. of total intersections: {}".format(len(orbit_number)))

    ### Print to file
    if f_out is not None:
        print("Printing results to {}...".format(f_out))
        with open(f_out, 'w') as fp:
            fp.write("orbit_number,t_diff_(h),product_id,download_url,browse_url\n")
            for i in range(len(orbit_number)):
                fp.write("{},{:.2f},{},{},{}\n".format(
                    orbit_number[i], t_diff[i], product_name[i],
                    download_url[i], browse_url[i]))

    return orbit_number, product_name, browse_url, download_url, t_diff, md_out
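# Hypothetical usage sketch; the column names (lons, lats, time, orbit_number)
# follow the docstring above, and all values/credentials are placeholders.
import pandas as pd

is2_df = pd.DataFrame({
    'lons': [-60.0, -60.1, -60.2],
    'lats': [70.0, 70.5, 71.0],
    'time': ['2020-03-01T12:00:00', '2020-03-01T12:00:10', '2020-03-01T12:00:20'],
    'orbit_number': [6543, 6543, 6543],
})
results = search_sentinels('Sentinel-1', is2_df, [-61.0, 69.5, -59.5, 71.5],
                           dt=3, user='******', pwd='******',
                           f_out='intersections.csv')
orbit_number, product_name, browse_url, download_url, t_diff, md_out = results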
if __name__ == '__main__':
    api = SentinelAPI(USERNAME, PASSWORD, 'https://scihub.copernicus.eu/dhus')

    args = parse_arguments()
    ids = args.ids
    download_path = os.path.abspath(os.path.join('./', args.directory))
    geojson = args.geojson

    if os.path.splitext(ids)[1] == '.csv':
        with open(ids, 'r') as csvfile:
            csvfile.readline()
            for line in csv.reader(csvfile, delimiter=','):
                product_id = line[0]
                satdownload(product_id, geojson, download_path, api=api,
                            download_only=args.download)
    elif os.path.splitext(ids)[1] == '.txt':
        with open(ids, 'r') as txtfile:
# based on https://pypi.python.org/pypi/sentinelsat
# http://sentinelsat.readthedocs.io/en/stable/api.html
import getpass
import json
from datetime import datetime as dt
from datetime import timedelta

from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt
from shapely.geometry import mapping
from ocgis import GeomCabinetIterator
from flyingpigeon import config

password = getpass.getpass()
api = SentinelAPI('nilshempelmann', password)

geom = {
    "type": "Polygon",
    "coordinates": [[[14.00, 8.00], [16.00, 8.00], [16.00, 10.00],
                     [14.00, 10.00], [14.00, 8.00]]]
}
# geom = {"type": "Polygon", "coordinates": [[[-69.87682044199994, 12.427394924000097], [-70.05809485599988, 12.537176825000088], [-70.04873613199993, 12.632147528000104], [-69.93639075399994, 12.53172435100005], [-69.87682044199994, 12.427394924000097]]]}

# footprint = geojson_to_wkt(read_geojson('search_polygon.geojson'))
footprint = geojson_to_wkt(geom)
import os
import typing as tp

import pandas as pd
from sentinelsat import SentinelAPI
from shapely.geometry import box
from shapely.wkt import loads


def download_best(_box: box, download_path: str, user: str, pw: str) -> tp.List[str]:
    _api = SentinelAPI(user, pw, 'https://scihub.copernicus.eu/dhus')
    file_path = os.path.join(download_path, "save.csv")
    if not os.path.exists(file_path):
        products = _api.query(
            _box,
            date=('NOW-1MONTH', 'NOW'),
            platformname='Sentinel-2',
            processinglevel='Level-1C',
            cloudcoverpercentage=(0, 10),
        )
        products_df = _api.to_dataframe(products)
        tile_ids = []

        def _unknown_tile_id(x: str, t_ids: tp.List) -> bool:
            ret_val = x in t_ids
            if not ret_val:
                t_ids.append(x)
            return not ret_val

        # sort products by cloud coverage
        products_df_sorted = products_df.sort_values(["cloudcoverpercentage"], ascending=[True])
        # drop duplicate tiles, keeping the entry with the lowest cloud coverage
        first_tiles = [
            _unknown_tile_id(x, tile_ids)
            for x in list(products_df_sorted['tileid'].array)
        ]
        # first_tiles = np.vectorize(_unknown_tile_id(lambda x: x, tile_ids))(products_df_sorted['tileid'].array)
        products_df_sorted_unique = products_df_sorted[first_tiles]
        if not os.path.exists(download_path):
            os.makedirs(download_path)
        products_df_sorted_unique.to_csv(file_path)
    else:
        products_df_sorted_unique = pd.read_csv(file_path)

    # estimate area from footprint
    products_df_sorted_unique['area'] = [
        __estimate_area(loads(e))
        for e in list(products_df_sorted_unique['footprint'].array)
    ]
    # drop areas smaller than three quarters of the full 100 km * 100 km tile
    products_df_sorted_unique_larger = products_df_sorted_unique[
        products_df_sorted_unique['area'] > 100000 * 100000 / 4 * 3]

    _api.download_all(products_df_sorted_unique_larger.uuid, download_path)

    return [
        os.path.join(download_path, x)
        for x in products_df_sorted_unique_larger.title
    ]
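# Hypothetical usage sketch; the bounding box and credentials are placeholders,
# and __estimate_area is assumed to be defined elsewhere in this module.
if __name__ == '__main__':
    aoi = box(23.5, 40.4, 23.8, 40.7)  # (minx, miny, maxx, maxy) in lon/lat
    downloaded = download_best(aoi, './downloads', '******', '******')
    print('\n'.join(downloaded))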
import os
import sys

import numpy as np
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt

ROOT_DIR = os.path.join(os.getcwd(), "..")
SENTINELPRODUCTS_DIR = os.path.join(ROOT_DIR, "sentineldata", "products")
GEOJSON_DIR = os.path.join(ROOT_DIR, "datasets")
REGION_DATA_FILE = os.path.join(GEOJSON_DIR, "regions.geojson")
NDVI_DIR = os.path.join(SENTINELPRODUCTS_DIR, "ndvi")
TRAIN_DIR = os.path.join(GEOJSON_DIR, "train")
VAL_DIR = os.path.join(GEOJSON_DIR, "val")
TEST_DIR = os.path.join(GEOJSON_DIR, "test")

url = "https://scihub.copernicus.eu/dhus"
user = sys.argv[1]
pw = sys.argv[2]
api = SentinelAPI(user, pw, url)


def download_sentinel_products_for_ROI(geojson_file):
    print("Searching products for %s" % geojson_file)
    feature = read_geojson(geojson_file)
    footprint = geojson_to_wkt(feature.geometry)
    date = feature.properties["date"]
    incubation = feature.properties["incubation"]
    # TODO adjustable coverage interval
    # Config file?
    products = api.query(footprint,
def test_count():
    api = SentinelAPI(**_api_kwargs)
    count = api.count(None, ("20150101", "20151231"))
    assert count > 100000
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt
import mysql.connector
import time

LOGIN = '******'
PASSWORD = '******'
URL = 'https://scihub.copernicus.eu/dhus'
api = SentinelAPI(LOGIN, PASSWORD, URL)

while True:
    mydb = mysql.connector.connect(host="host",
                                   user="******",
                                   password="******",
                                   database="db")
    mycursor = mydb.cursor()
    mycursor.execute("SELECT * FROM links")
    myresult = mycursor.fetchall()

    # products = api.query(date=(date(2020, 8, 8), date(2020, 8, 9)), platformname='Sentinel-2')
    # products = api.query(date=('NOW-8HOURS', 'NOW'), producttype='SLC')
    products = api.query(date=('NOW-8HOURS', 'NOW'), platformname='Sentinel-1')

    links = []
    for i, v in enumerate(products):
        exist = False
        for link in myresult:
            if link[2] == products[v]['link']:
                exist = True
def test_get_product_info_bad_key():
    api = SentinelAPI(**_api_auth)
    with pytest.raises(SentinelAPIError) as excinfo:
        api.get_product_odata('invalid-xyz')
    assert excinfo.value.msg == "InvalidKeyException : Invalid key (invalid-xyz) to access Products"
def test_dhus_version(dhus_url, version):
    api = SentinelAPI("mock_user", "mock_password", api_url=dhus_url)
    request_url = dhus_url + "/api/stub/version"
    with requests_mock.mock() as rqst:
        rqst.get(request_url, json={"value": version})
        assert api.dhus_version == version
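# The (dhus_url, version) arguments suggest this test is parametrized; a
# plausible decorator is sketched here -- the URL/version pairs are assumptions,
# not taken from the original suite.
#
# @pytest.mark.parametrize("dhus_url, version", [
#     ("https://scihub.copernicus.eu/dhus", "0.13.4-22"),
#     ("https://scihub.copernicus.eu/apihub", "0.13.4-22"),
# ])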