def test_date_arithmetic():
    """Relative date offsets (-1DAY, +1DAY-1HOUR) are accepted by the server."""
    api = SentinelAPI(**_api_kwargs)
    date_range = ('2016-12-01T00:00:00Z-1DAY', '2016-12-01T00:00:00Z+1DAY-1HOUR')
    results = api.query('ENVELOPE(0, 10, 10, 0)', date_range)
    assert api._last_response.status_code == 200
    assert len(results) > 0
def test_small_query():
    """A minimal query produces the expected Solr query string and succeeds."""
    api = SentinelAPI(**_api_kwargs)
    api.query(**_small_query)
    expected_query = (
        '(beginPosition:[2015-01-01T00:00:00Z TO 2015-01-02T00:00:00Z]) '
        'AND (footprint:"Intersects(POLYGON((0 0,1 1,0 1,0 0)))")')
    assert api._last_query == expected_query
    assert api._last_status_code == 200
def test_download_all(tmpdir):
    """Download several tiny products, then force one to fail the checksum check.

    Fix: renamed locals `id` -> `failing_id` and `json` -> `odata_json`, which
    previously shadowed the `id` builtin and the stdlib `json` module name.
    """
    api = SentinelAPI(**_api_auth)
    # From https://scihub.copernicus.eu/apihub/odata/v1/Products?$top=5&$orderby=ContentLength
    filenames = ["S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E",
                 "S1A_WV_OCN__2SSV_20150526T211029_20150526T211737_006097_007E78_134A",
                 "S1A_WV_OCN__2SSV_20150526T081641_20150526T082418_006090_007E3E_104C"]
    ids = list(api.query_raw(" OR ".join(filenames)))
    assert len(ids) == len(filenames)

    # Download normally
    product_infos, failed_downloads = api.download_all(ids, str(tmpdir))
    assert len(failed_downloads) == 0
    assert len(product_infos) == len(filenames)
    for product_id, product_info in product_infos.items():
        pypath = py.path.local(product_info['path'])
        assert pypath.purebasename in filenames
        assert pypath.check(exists=1, file=1)
        assert pypath.size() == product_info["size"]

    # Force one download to fail by serving a bogus MD5 for it
    failing_id, product_info = list(product_infos.items())[0]
    path = product_info['path']
    py.path.local(path).remove()
    with requests_mock.mock(real_http=True) as rqst:
        url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('%s')?$format=json" % failing_id
        odata_json = api.session.get(url).json()
        odata_json["d"]["Checksum"]["Value"] = "00000000000000000000000000000000"
        rqst.get(url, json=odata_json)
        product_infos, failed_downloads = api.download_all(
            ids, str(tmpdir), max_attempts=1, checksum=True)
        assert len(failed_downloads) == 1
        assert len(product_infos) + len(failed_downloads) == len(filenames)
        assert failing_id in failed_downloads
def test_format_url_custom_api_url():
    """The search URL honors a custom api_url, with or without a trailing slash."""
    for base_url in ('https://scihub.copernicus.eu/dhus/',
                     'https://scihub.copernicus.eu/dhus'):
        api = SentinelAPI("user", "pw", api_url=base_url)
        url = api._format_url()
        assert url.startswith('https://scihub.copernicus.eu/dhus/search')
def raw_products(api_kwargs, vcr, test_wkt):
    """A fixture for tests that need some non-specific set of products in the form of a raw response as input."""
    with vcr.use_cassette('products_fixture', decode_compressed_response=False):
        api = SentinelAPI(**api_kwargs)
        # _load_query returns a tuple; [0] keeps only the raw product list
        raw_products = api._load_query(
            api.format_query(test_wkt, ("20151219", "20151228"))
        )[0]
        return raw_products
def products():
    """A fixture for tests that need some non-specific set of products as input."""
    api = SentinelAPI(**_api_auth)
    footprint = geojson_to_wkt(read_geojson('tests/map.geojson'))
    return api.query(footprint, "20151219", "20151228")
def test_large_query():
    """A month-long query over a large area must exceed one result page."""
    api = SentinelAPI(**_api_kwargs)
    results = api.query(**_large_query)
    expected_query = (
        '(beginPosition:[2015-12-01T00:00:00Z TO 2015-12-31T00:00:00Z]) '
        'AND (footprint:"Intersects(POLYGON((0 0,0 10,10 10,10 0,0 0)))")')
    assert api._last_query == expected_query
    assert api._last_status_code == 200
    assert len(results) > api.page_size
def test_SentinelAPI_wrong_credentials():
    """Querying with bad credentials raises SentinelAPIError carrying HTTP 401."""
    api = SentinelAPI("wrong_user", "wrong_password")
    with pytest.raises(SentinelAPIError) as excinfo:
        api.query(**_small_query)
    assert excinfo.value.response.status_code == 401
def raw_products():
    """A fixture for tests that need some non-specific set of products in the form of a raw response as input."""
    api = SentinelAPI(**_api_auth)
    raw_products = api._load_query(api.format_query(
        geojson_to_wkt(read_geojson('tests/map.geojson')), "20151219", "20151228")
    )
    return raw_products
def test_SentinelAPI_connection():
    """A basic authenticated query succeeds and records the raw query string."""
    api = SentinelAPI(**_api_auth)
    api.query(**_small_query)
    expected_query = (
        'beginPosition:[2015-01-01T00:00:00Z TO 2015-01-02T00:00:00Z] '
        'footprint:"Intersects(POLYGON((0 0,1 1,0 1,0 0)))"')
    assert api._last_query == expected_query
    assert api._last_response.status_code == 200
def test_quote_symbol_bug():
    """Regression check: a '+' inside a date range no longer errors server-side."""
    # A test to check if plus symbol handling works correctly on the server side
    # It used to raise an error but has since been fixed
    # https://github.com/SentinelDataHub/DataHubSystem/issues/23
    api = SentinelAPI(**_api_kwargs)
    q = 'beginposition:[2017-05-30T00:00:00Z TO 2017-05-31T00:00:00Z+1DAY]'
    count = api.count(raw=q)
    assert count > 0
def test_get_products_size(products): assert SentinelAPI.get_products_size(products) == 90.94 # load a new very small query api = SentinelAPI(**_api_auth) with my_vcr.use_cassette('test_get_products_size'): products = api.query_raw("S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E") assert len(products) > 0 # Rounded to zero assert SentinelAPI.get_products_size(products) == 0
def products(api_kwargs, vcr, test_wkt):
    """A fixture for tests that need some non-specific set of products as input."""
    with vcr.use_cassette('products_fixture', decode_compressed_response=False):
        api = SentinelAPI(**api_kwargs)
        found = api.query(test_wkt, ("20151219", "20151228"))
        # The cassette is known to contain more than 20 matches.
        assert len(found) > 20
        return found
def test_get_products_size(api, vcr, products):
    """Total size is summed in GB; a single tiny product rounds down to zero."""
    assert SentinelAPI.get_products_size(products) == 75.4
    # Re-query for one very small product.
    with vcr.use_cassette('test_get_products_size'):
        small = api.query(
            raw="S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E")
    assert len(small) > 0
    # Rounded to zero
    assert SentinelAPI.get_products_size(small) == 0
def test_missing_dependency_dataframe(monkeypatch):
    """to_dataframe/to_geodataframe raise ImportError when the optional dependency is missing.

    Fix: the monkeypatch.setitem calls were previously inside the pytest.raises
    blocks, so an exception from the patching itself would have made the test
    pass spuriously. The raises block now wraps only the call under test.
    """
    api = SentinelAPI("mock_user", "mock_password")

    # Blank out the module entry so the import inside the method fails.
    monkeypatch.setitem(sys.modules, "pandas", None)
    with pytest.raises(ImportError):
        api.to_dataframe({"test": "test"})

    monkeypatch.setitem(sys.modules, "geopandas", None)
    with pytest.raises(ImportError):
        api.to_geodataframe({"test": "tst"})
def test_api_query_format_escape_spaces(api):
    """Whitespace in query values is trimmed at the edges and escaped inside."""
    # Range tuples and pre-formed ranges pass through unchanged.
    query = SentinelAPI.format_query(ingestiondate=('NOW-1DAY', 'NOW'))
    assert query == 'ingestiondate:[NOW-1DAY TO NOW]'
    query = SentinelAPI.format_query(ingestiondate='[NOW-1DAY TO NOW]')
    assert query == 'ingestiondate:[NOW-1DAY TO NOW]'
    # Surrounding whitespace is stripped.
    query = SentinelAPI.format_query(ingestiondate=' [NOW-1DAY TO NOW] ')
    assert query == 'ingestiondate:[NOW-1DAY TO NOW]'
    query = SentinelAPI.format_query(relativeorbitnumber=' {101 TO 103} ')
    assert query == 'relativeorbitnumber:{101 TO 103}'
    query = SentinelAPI.format_query(filename='S3A_OL_2* ')
    assert query == 'filename:S3A_OL_2*'
    # Interior spaces and tabs are backslash-escaped.
    query = SentinelAPI.format_query(timeliness='Non Time Critical')
    assert query == r'timeliness:Non\ Time\ Critical'
    query = SentinelAPI.format_query(timeliness='Non\tTime\tCritical')
    assert query == r'timeliness:Non\ Time\ Critical'
    assert api.count(timeliness='Non Time Critical') > 0
    # Allow for regex weirdness
    query = SentinelAPI.format_query(timeliness='.+ Critical')
    assert query == r'timeliness:.+\ Critical'
    assert api.count(timeliness='.+ Critical') > 0
    # Spaces inside regex character classes are preserved verbatim.
    query = SentinelAPI.format_query(identifier='/S[123 ]A.*/')
    assert query == r'identifier:/S[123 ]A.*/'
    assert api.count(identifier='/S[123 ]A.*/') > 0
def test_trigger_lta_failed(http_status_code):
    """_trigger_offline_retrieval raises SentinelAPILTAError for error status codes.

    Fix: dropped the unused `as excinfo` capture — nothing inspected it.
    """
    api = SentinelAPI("mock_user", "mock_password")
    request_url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/$value"
    with requests_mock.mock() as rqst:
        rqst.get(
            request_url,
            status_code=http_status_code
        )
        with pytest.raises(SentinelAPILTAError):
            api._trigger_offline_retrieval(request_url)
def test_trigger_lta_accepted():
    """A 202 response from the LTA endpoint is returned as-is by the trigger call."""
    api = SentinelAPI("mock_user", "mock_password")
    request_url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/$value"
    with requests_mock.mock() as rqst:
        rqst.get(request_url, text="Mock trigger accepted", status_code=202)
        status = api._trigger_offline_retrieval(request_url)
        assert status == 202
def test_order_by():
    """Results come back sorted by ascending cloud cover when order_by asks for it."""
    api = SentinelAPI(**_api_auth)
    footprint = geojson_to_wkt(read_geojson(FIXTURES_DIR + '/map.geojson'))
    products = api.query(
        footprint,
        ("20151219", "20151228"),
        platformname="Sentinel-2",
        cloudcoverpercentage=(0, 10),
        order_by="cloudcoverpercentage, -beginposition",
    )
    assert len(products) == 3
    cloud_values = [entry["cloudcoverpercentage"] for entry in products.values()]
    assert cloud_values == sorted(cloud_values)
def test_get_products_invalid_json():
    """A malformed JSON search response surfaces as SentinelAPIError with a clear message."""
    api = SentinelAPI("mock_user", "mock_password")
    with requests_mock.mock() as rqst:
        # Return HTTP 200 but a body that is not valid JSON.
        rqst.post(
            'https://scihub.copernicus.eu/apihub/search?format=json',
            text="{Invalid JSON response",
            status_code=200
        )
        with pytest.raises(SentinelAPIError) as excinfo:
            api.query(
                area=geojson_to_wkt(read_geojson(FIXTURES_DIR + "/map.geojson")),
                date=("20151219", "20151228"),
                platformname="Sentinel-2"
            )
        assert excinfo.value.msg == "Invalid API response."
def test_query_by_names():
    """_query_names maps product names to their UUIDs, even with duplicated input."""
    api = SentinelAPI(**_api_auth)
    names = ["S2A_MSIL1C_20170205T105221_N0204_R051_T31TCF_20170205T105426",
             "S1A_EW_GRDH_1SDH_20141003T003840_20141003T003920_002658_002F54_4DD1"]
    expected = {names[0]: {"2f379a52-3041-4b92-a8a8-92bddc495594"},
                names[1]: {"2d116e6a-536e-49b3-a587-5cd6b5baa3c9"}}
    result = api._query_names(names)
    assert list(result) == names
    for name in names:
        assert set(result[name]) == expected[name]
    # Massive duplication of the input must not change the result.
    result2 = api._query_names(names * 100)
    assert result == result2
def test_s2_cloudcover():
    """A raw cloud-cover range filter returns the three known matching products."""
    api = SentinelAPI(**_api_auth)
    products = api.query(
        geojson_to_wkt(read_geojson('tests/map.geojson')),
        "20151219", "20151228",
        platformname="Sentinel-2",
        cloudcoverpercentage="[0 TO 10]"
    )
    assert len(products) == 3
    expected_ids = [
        "6ed0b7de-3435-43df-98bf-ad63c8d077ef",
        "37ecee60-23d8-4ec2-a65f-2de24f51d30e",
        "0848f6b8-5730-4759-850e-fc9945d42296",
    ]
    assert list(products) == expected_ids
def test_footprints_s1():
    """Sentinel-1 GRD search footprints validate and match the stored fixture."""
    api = SentinelAPI(**_api_auth)
    products = api.query(
        geojson_to_wkt(read_geojson(FIXTURES_DIR + '/map.geojson')),
        (datetime(2014, 10, 10), datetime(2014, 12, 31)),
        producttype="GRD"
    )
    footprints = api.to_geojson(products)
    for footprint in footprints['features']:
        # geojson geometries expose .errors(); empty result means valid
        assert not footprint['geometry'].errors()
    with open(FIXTURES_DIR + '/expected_search_footprints_s1.geojson') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(footprints) == set(expected_footprints)
def test_download(tmpdir):
    """End-to-end download behavior: fresh download, skip-when-present,
    re-download of corrupted/partial files, and checksum failure."""
    api = SentinelAPI(**_api_auth)
    uuid = "1f62a176-c980-41dc-b3a1-c735d660c910"
    filename = "S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E"
    expected_path = tmpdir.join(filename + ".zip")

    # Download normally
    product_info = api.download(uuid, str(tmpdir), checksum=True)
    assert expected_path.samefile(product_info["path"])
    assert product_info["title"] == filename
    assert product_info["size"] == expected_path.size()
    # NOTE(review): `hash` shadows the builtin; harmless here but worth renaming.
    hash = expected_path.computehash()
    modification_time = expected_path.mtime()
    expected_product_info = product_info
    del expected_product_info['path']

    # File exists, test with checksum
    # Expect no modification
    product_info = api.download(uuid, str(tmpdir), check_existing=True)
    assert expected_path.mtime() == modification_time
    del product_info['path']
    assert product_info == expected_product_info

    # File exists, test without checksum
    # Expect no modification
    product_info = api.download(uuid, str(tmpdir), check_existing=False)
    assert expected_path.mtime() == modification_time
    del product_info['path']
    assert product_info == expected_product_info

    # Create invalid file, expect re-download
    with expected_path.open("wb") as f:
        # Write a single null byte at the end to corrupt the file while keeping its size.
        f.seek(expected_product_info["size"] - 1)
        f.write(b'\0')
    assert expected_path.computehash("md5") != hash
    product_info = api.download(uuid, str(tmpdir), check_existing=True)
    assert expected_path.computehash("md5") == hash
    del product_info['path']
    assert product_info == expected_product_info

    # Test continue
    with expected_path.open("rb") as f:
        content = f.read()
    with expected_path.open("wb") as f:
        # Truncate to the first 100 bytes to simulate an interrupted download.
        f.write(content[:100])
    assert expected_path.computehash("md5") != hash
    product_info = api.download(uuid, str(tmpdir), check_existing=True)
    assert expected_path.computehash("md5") == hash
    del product_info['path']
    assert product_info == expected_product_info

    # Test MD5 check
    with expected_path.open("wb") as f:
        f.write(b'abcd' * 100)
    assert expected_path.computehash("md5") != hash
    with pytest.raises(InvalidChecksumError):
        api.download(uuid, str(tmpdir), check_existing=True, checksum=True)
def test_footprints_s1():
    """Sentinel-1 GRD search footprints validate and match the stored fixture."""
    api = SentinelAPI(**_api_auth)
    products = api.query(
        geojson_to_wkt(read_geojson('tests/map.geojson')),
        datetime(2014, 10, 10), datetime(2014, 12, 31),
        producttype="GRD"
    )
    footprints = api.to_geojson(products)
    for footprint in footprints['features']:
        # geojson.is_valid returns a dict with 'valid'/'message' keys
        validation = geojson.is_valid(footprint['geometry'])
        assert validation['valid'] == 'yes', validation['message']
    with open('tests/expected_search_footprints_s1.geojson') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(footprints) == set(expected_footprints)
def test_to_geopandas(products):
    """Conversion to a GeoDataFrame preserves row count, CRS, and total footprint area.

    Fix: removed a leftover debug print of the union area.
    """
    gdf = SentinelAPI.to_geodataframe(products)
    assert type(gdf).__name__ == 'GeoDataFrame'
    assert gdf.unary_union.area == pytest.approx(89.6, abs=0.1)
    assert len(gdf) == len(products)
    assert gdf.crs == {'init': 'epsg:4326'}
def test_progressbars(capsys):
    """Progress bars print to stderr when enabled and stay silent when disabled."""
    api = SentinelAPI("mock_user", "mock_password")
    testfile_md5 = hashlib.md5()
    true_path = FIXTURES_DIR + "/expected_search_footprints_s1.geojson"
    with open(true_path, "rb") as testfile:
        testfile_md5.update(testfile.read())
        real_md5 = testfile_md5.hexdigest()
    # With progressbars on (the default), checksumming reports progress on stderr.
    assert api._md5_compare(true_path, real_md5) is True
    out, err = capsys.readouterr()
    assert "checksumming" in err
    # With progressbars off, nothing is written to either stream.
    api = SentinelAPI("mock_user", "mock_password", show_progressbars=False)
    assert api._md5_compare(FIXTURES_DIR + "/map.geojson", real_md5) is False
    out, err = capsys.readouterr()
    assert out == ""
    assert "checksumming" not in err
def test_SentinelAPI_wrong_credentials(small_query):
    """Every API entry point fails with HTTP 401 when credentials are wrong."""
    api = SentinelAPI("wrong_user", "wrong_password")

    def assert_unauthorized(func, *args, **kwargs):
        # Shared check: the call must raise SentinelAPIError with status 401.
        with pytest.raises(SentinelAPIError) as excinfo:
            func(*args, **kwargs)
        assert excinfo.value.response.status_code == 401

    uuid = '8df46c9e-a20c-43db-a19a-4240c2ed3b8b'
    assert_unauthorized(api.query, **small_query)
    assert_unauthorized(api.get_product_odata, uuid)
    assert_unauthorized(api.download, uuid)
    assert_unauthorized(api.download_all, [uuid])
def test_large_query():
    """Paging: multi-page results are fetched fully; limit/offset slice consistently."""
    api = SentinelAPI(**_api_kwargs)
    full_products = list(api.query(**_large_query))
    assert api._last_query == (
        'beginPosition:[2015-12-01T00:00:00Z TO 2015-12-31T00:00:00Z] '
        'footprint:"Intersects(POLYGON((0 0,0 10,10 10,10 0,0 0)))"')
    assert api._last_response.status_code == 200
    # More matches than one page forces the paging code path.
    assert len(full_products) > api.page_size
    result = list(api.query(limit=150, **_large_query))
    assert result == full_products[:150]
    result = list(api.query(limit=20, offset=90, **_large_query))
    assert result == full_products[90:110]
    # Offset close to the end: only the remaining tail is returned.
    result = list(api.query(limit=20, offset=len(full_products) - 10, **_large_query))
    assert result == full_products[-10:]
def smallest_archived_products(api_kwargs, vcr):
    """A fixture returning OData for n small, old products expected to be offline (archived)."""
    n = 3
    api = SentinelAPI(**api_kwargs)
    # Find some small and old products expecting them to be archived due to age.
    # Can't use the OData API for this as we do for the online products
    # because the ContentLength value there is not match the true product size.
    odatas = []
    with vcr.use_cassette("smallest_archived_products"):
        products = api.query(date=(None, "20170101"), size="/.+KB/", limit=10)
        for uuid in products:
            odata = api.get_product_odata(uuid)
            if not odata["Online"]:
                odatas.append(odata)
                if len(odatas) == n:
                    break
    assert len(odatas) == n
    return odatas
def test_format_url():
    """_format_url caps rows at page_size and appends orderby only when requested."""
    api = SentinelAPI(**_api_auth)
    start_row = 0
    # Default: rows equals the API page size.
    url = api._format_url(offset=start_row)
    assert url == 'https://scihub.copernicus.eu/apihub/search?format=json&rows={rows}&start={start}'.format(
        rows=api.page_size, start=start_row)
    # A limit below page_size is used verbatim.
    limit = 50
    url = api._format_url(limit=limit, offset=start_row)
    assert url == 'https://scihub.copernicus.eu/apihub/search?format=json&rows={rows}&start={start}'.format(
        rows=limit, start=start_row)
    # A limit above page_size is clamped down to page_size.
    url = api._format_url(limit=api.page_size + 50, offset=start_row)
    assert url == 'https://scihub.copernicus.eu/apihub/search?format=json&rows={rows}&start={start}'.format(
        rows=api.page_size, start=start_row)
    # order_by adds an &orderby clause.
    url = api._format_url(order_by="beginposition desc", limit=api.page_size + 50, offset=10)
    assert url == 'https://scihub.copernicus.eu/apihub/search?format=json&rows={rows}&start={start}' \
                  '&orderby={orderby}'.format(rows=api.page_size, start=10, orderby="beginposition desc")
def instance(cls) -> SentinelManager:
    """Lazily create and cache the singleton SentinelManager instance."""
    if cls._instance is None:
        # Credentials come from the environment; built only on first access.
        user = os.getenv(SCIHUB_USER)
        password = os.getenv(SCIHUB_PASS)
        api = SentinelAPI(user, password, SCIHUB_URL)
        cls._instance = SentinelManager(sentinel_api=api)
    return cls._instance
def _sentinelsat_query(user, passwd, footprint_wkt, start_date, end_date, cloud=50):
    """
    Fetches a list of Sentinel-2 products.

    Fix: `url=...` was previously passed as a keyword to `api.query()`, where
    sentinelsat treats unknown keywords as raw search terms (yielding a bogus
    `url:<...>` clause) rather than selecting the endpoint. The endpoint must
    be given to the SentinelAPI constructor as `api_url`.

    :param user: SciHub username
    :param passwd: SciHub password
    :param footprint_wkt: area of interest as WKT
    :param start_date: search window start
    :param end_date: search window end
    :param cloud: maximum cloud cover percentage (default 50)
    :return: OrderedDict of matching products
    """
    # Originally by Ciaran Robb
    api = SentinelAPI(user, passwd, api_url="https://apihub.copernicus.eu/apihub/")
    products = api.query(footprint_wkt,
                         date=(start_date, end_date),
                         platformname="Sentinel-2",
                         cloudcoverpercentage="[0 TO {}]".format(cloud))
    return products
class sentinelWrapper:
    """Thin wrapper around sentinelsat's SentinelAPI for search and download,
    configured from the project's `config` module."""

    def __init__(self):
        logger.info("connect to sentinel API")
        # connection to API for search queries and download requests
        self.api = SentinelAPI(config.copernicusUser, config.copernicusPW, config.copernicusURL)
        logger.info("sentinel API connected")

    def getSentinelProducts(self, lat, lon, dateFrom, dateTo, platform, **kwargs):
        """Query products intersecting a point within a date range.

        Extra keyword arguments are forwarded to SentinelAPI.query; a bare
        `cloudcoverpercentage` number is expanded to a (0, max) range first.
        """
        logger.info("start sentinel query")
        # convert geolocation coordinates to wkt format
        footprint = geojson_to_wkt(Point((lon, lat)))
        # prepare parameter for cloud coverage
        if "cloudcoverpercentage" in kwargs:
            kwargs["cloudcoverpercentage"] = (0, kwargs["cloudcoverpercentage"])
        # search query
        result = self.api.query(footprint, date=(dateFrom, dateTo),
                                platformname=platform, **kwargs)
        logger.info("sentinel query complete")
        return result

    # download multiple sentinel products (list of product IDs)
    def downloadSentinelProducts(self, products):
        logger.info("start downloading sentinel product list")
        self.api.download_all(products, config.bigTilesDir)
        logger.info("download complete")

    # download sentinel product with certain product ID
    def downloadSentinelProduct(self, productID):
        logger.info("start downloading sentinel product")
        self.api.download(productID, config.bigTilesDir)
        logger.info("download complete")
def search(user, psswd, sensor, file, start, end, maxcloud):
    '''
    Searching for all the available scenes in the specified region and
    with the parameters provided by user
    '''
    url = 'https://scihub.copernicus.eu/dhus'
    api = SentinelAPI(user, psswd, url)
    footprint = geojson_to_wkt(read_geojson(file))
    if sensor == 's1':
        # Sentinel-1: fixed GRD/IW/ascending/VV-VH search parameters.
        products = api.query(footprint, date=(start, end),
                             platformname='Sentinel-1',
                             orbitdirection='ASCENDING',
                             polarisationmode='VV VH',
                             producttype='GRD',
                             sensoroperationalmode='IW')
        for x in products:
            logging.info("\t {} {} ".format(products[x]["filename"], products[x]["size"]))
        logging.info("\t Found {} scenes in the region specified".format(len(products)))
        # Persist the found scene UUIDs for later processing.
        with open("scenes_s1_found.txt", "w") as f:
            for i in products:
                f.write(products[i]["uuid"] + "\n")
        # NOTE(review): the s1 branch returns (products, api) while the s2 branch
        # returns only products — confirm callers expect this asymmetry.
        return (products, api)
    if sensor == 's2':
        products = api.query(footprint, date=(start, end),
                             platformname='Sentinel-2',
                             cloudcoverpercentage=(0, maxcloud))
        for x in products:
            logging.info("\t {} {} ".format(products[x]["filename"], products[x]["size"]))
        logging.info("\t Found {} scenes in the region specified".format(len(products)))
        # Persist the found scene identifiers for later processing.
        with open("scenes_s2_found.txt", "w") as f:
            for i in products:
                f.write(products[i]["identifier"] + "\n")
        return (products)
def get_sentinel_images(reef, start_date, end_date, num_images, user, password):
    """
    Method to download Sentinel-2 images using Sentinel API

    Params -
    1. reef (str) - Coral reef object
    2. start_date (str) - starting date of sentinel images
    3. end_date (str) - end date of sentinel images
    4. num_images (int) - number of sentinel-2 images to download
    5. user (str) - username on scihub.copernicus.eu
    6. password (str) - password on scihub.copernicus.eu

    Fixes: stop iterating once num_images products have been downloaded
    (previously the loop ran over the entire result set), and use
    os.path.splitext instead of split('.')[0] so filenames containing dots
    only lose their final extension.
    """
    # login into api
    api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus')

    # load in geojson of reef
    reef_path = reef.get_path()
    reef_gjson_fp = os.path.join(reef_path, reef.get_reef_name() + '.geojson')
    reef_footprint = geojson_to_wkt(read_geojson(reef_gjson_fp))

    # query sentinel sat api
    products = api.query(reef_footprint, date=(start_date, end_date),
                         platformname='Sentinel-2',
                         area_relation='Intersects',
                         processinglevel='Level-2A',
                         order_by='cloudcoverpercentage')

    # creating folder for saving sentinel images
    sentinel_path = os.path.join(reef_path, 'SAFE files')
    if not os.path.exists(sentinel_path):
        os.makedirs(sentinel_path)

    # downloading num_images, then stop
    for i, product_id in enumerate(products):
        if i >= num_images:
            break
        api.download(product_id, directory_path=sentinel_path)

    # unzipping files
    for file in os.listdir(sentinel_path):
        if file.endswith('.zip'):
            file_path = os.path.join(sentinel_path, file)
            # splitext strips only the trailing '.zip'
            out_path = os.path.join(sentinel_path, os.path.splitext(file)[0])
            if os.path.exists(file_path) and not os.path.exists(out_path):
                with zipfile.ZipFile(file_path, "r") as zip_ref:
                    zip_ref.extractall(sentinel_path)
                os.remove(file_path)
def test_footprints_s2(products):
    """Sentinel-2 search footprints are valid and equal the stored fixture."""
    footprints = SentinelAPI.to_geojson(products)
    for feature in footprints['features']:
        assert not feature['geometry'].errors()
    with open(FIXTURES_DIR + '/expected_search_footprints_s2.geojson') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # Sets make the comparison order-independent, as JSON object order is unspecified.
    assert set(footprints) == set(expected_footprints)
def test_SentinelAPI_wrong_credentials(small_query):
    """Every entry point raises UnauthorizedError (HTTP 401) for bad credentials."""
    api = SentinelAPI("wrong_user", "wrong_password")

    @contextmanager
    def assert_exception():
        # Shared wrapper: the wrapped call must raise UnauthorizedError with
        # a 401 status and the server-provided message.
        with pytest.raises(UnauthorizedError) as excinfo:
            yield
        assert excinfo.value.response.status_code == 401
        assert "Invalid user name or password" in excinfo.value.msg

    with assert_exception():
        api.query(**small_query)
    with assert_exception():
        api.get_product_odata("8df46c9e-a20c-43db-a19a-4240c2ed3b8b")
    with assert_exception():
        api.download("8df46c9e-a20c-43db-a19a-4240c2ed3b8b")
    with assert_exception():
        api.download_all(["8df46c9e-a20c-43db-a19a-4240c2ed3b8b"])
def test_download_many(tmpdir):
    """CLI: download three products, skip re-download, then report a corrupted one."""
    runner = CliRunner()
    command = ['--user', API_AUTH[0], '--password', API_AUTH[1],
               '--uuid',
               '1f62a176-c980-41dc-b3a1-c735d660c910,5618ce1b-923b-4df2-81d9-50b53e5aded9,d8340134-878f-4891-ba4f-4df54f1e3ab4',
               '--download',
               '--path', str(tmpdir)]
    # Download 3 tiny products
    result = runner.invoke(
        cli, command, catch_exceptions=False
    )
    assert result.exit_code == 0
    # Should not re-download
    result = runner.invoke(
        cli, command, catch_exceptions=False
    )
    assert result.exit_code == 0
    # clean up
    for f in tmpdir.listdir():
        f.remove()
    # Prepare a response with an invalid checksum
    product_id = 'd8340134-878f-4891-ba4f-4df54f1e3ab4'
    url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('%s')?$format=json" % product_id
    api = SentinelAPI(*API_AUTH)
    json = api.session.get(url).json()
    json["d"]["Checksum"]["Value"] = "00000000000000000000000000000000"
    # Force one download to fail
    with requests_mock.mock(real_http=True) as rqst:
        rqst.get(url, json=json)
        rqst.get(url, json=json)
        # md5 flag set (implicitly), should raise an exception
        result = runner.invoke(
            cli, command, catch_exceptions=False
        )
        assert result.exit_code == 0
        assert 'is corrupted' in result.output
    # The corrupted product id must be recorded in corrupt_scenes.txt.
    assert tmpdir.join('corrupt_scenes.txt').check()
    with tmpdir.join('corrupt_scenes.txt').open() as f:
        assert product_id in f.read()
    # clean up
    tmpdir.remove()
def _get_smallest(api_kwargs, cassette, online, n=3):
    """Return parsed OData for the n smallest products with the given Online state."""
    api = SentinelAPI(**api_kwargs)
    online_flag = "true" if online else "false"
    url = ("{}odata/v1/Products?$format=json&$top={}"
           "&$orderby=ContentLength&$filter=Online%20eq%20{}").format(
        api_kwargs["api_url"], n, online_flag)
    with cassette:
        response = api.session.get(url)
    entries = response.json()["d"]["results"]
    odata = [_parse_odata_response(entry) for entry in entries]
    assert len(odata) == n
    return odata
def test_s2_cloudcover():
    """A (0, 10) cloud-cover filter returns the three known products, unsorted by cloud."""
    api = SentinelAPI(**_api_auth)
    products = api.query(
        geojson_to_wkt(read_geojson(FIXTURES_DIR + '/map.geojson')),
        ("20151219", "20151228"),
        platformname="Sentinel-2",
        cloudcoverpercentage=(0, 10)
    )
    assert len(products) == 3
    expected_ids = [
        "6ed0b7de-3435-43df-98bf-ad63c8d077ef",
        "37ecee60-23d8-4ec2-a65f-2de24f51d30e",
        "0848f6b8-5730-4759-850e-fc9945d42296",
    ]
    assert list(products) == expected_ids
    # For order-by test: default ordering is NOT by cloud cover.
    cloud_values = [entry["cloudcoverpercentage"] for entry in products.values()]
    assert sorted(cloud_values) != cloud_values
def __init__(self, index=None):
    """Set up the sentinelsat API, the pending-product queue, the resume index,
    and the fire training dataset.

    Fix: `open('indexLog.txt').readline()` previously leaked the file handle;
    it is now read inside a `with` block.

    :param index: optional starting index; when None it is resumed from indexLog.txt
    """
    # initialize sentinelsat
    # NOTE(review): credentials are hard-coded placeholders — move to config/env.
    self.api = SentinelAPI('username', 'password', 'https://scihub.copernicus.eu/dhus')
    # prepare queue for pending satellite products
    self.queue = deque([])
    # if no index is set, load from indexLog.txt file
    if index is None:
        with open('indexLog.txt') as index_log:
            self.index = int(index_log.readline())
        print("Loaded previous index from file. Starting from index " + str(self.index))
    else:
        self.index = int(index)
    # load fire training dataset file
    with open('fires.json') as f:
        self.data = json.load(f)
def test_footprints_s2(products, fixture_path):
    """Sentinel-2 search footprints are valid and equal the stored fixture."""
    footprints = SentinelAPI.to_geojson(products)
    for feature in footprints["features"]:
        assert not feature["geometry"].errors()
    with open(fixture_path("expected_search_footprints_s2.geojson")) as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # Sets make the comparison order-independent, as JSON object order is unspecified.
    assert set(footprints) == set(expected_footprints)
def test_query_by_names():
    """_query_names maps product names to UUID sets, even with duplicated input."""
    api = SentinelAPI(**_api_auth)
    names = [
        "S2A_MSIL1C_20170205T105221_N0204_R051_T31TCF_20170205T105426",
        "S1A_EW_GRDH_1SDH_20141003T003840_20141003T003920_002658_002F54_4DD1"
    ]
    expected = {
        names[0]: {"2f379a52-3041-4b92-a8a8-92bddc495594"},
        names[1]: {"2d116e6a-536e-49b3-a587-5cd6b5baa3c9"}
    }
    result = api._query_names(names)
    assert list(result) == names
    assert {name: set(result[name]) for name in names} == expected
    # Massive duplication of the input must not change the outcome.
    assert api._query_names(names * 100) == result
def test_scihub_unresponsive():
    """All entry points propagate requests' Timeout when the server is unreachable.

    Fix: dropped the repeated unused `as excinfo` captures — nothing inspected them.
    """
    api = SentinelAPI("mock_user", "mock_password")
    with requests_mock.mock() as rqst:
        rqst.request(requests_mock.ANY, requests_mock.ANY,
                     exc=requests.exceptions.ConnectTimeout)
        with pytest.raises(requests.exceptions.Timeout):
            api.query(**_small_query)
        with pytest.raises(requests.exceptions.Timeout):
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        with pytest.raises(requests.exceptions.Timeout):
            api.download('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        with pytest.raises(requests.exceptions.Timeout):
            api.download_all(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])
def _get_smallest(api_kwargs, cassette, online, n=3):
    """Return parsed OData for the n smallest products filtered by Online status."""
    api = SentinelAPI(**api_kwargs)
    # Order by ContentLength ascending so the smallest products come first.
    url = '{}odata/v1/Products?$format=json&$top={}&$orderby=ContentLength&$filter=Online%20eq%20{}'.format(
        api_kwargs['api_url'], n, 'true' if online else 'false')
    with cassette:
        r = api.session.get(url)
    odata = [_parse_odata_response(x) for x in r.json()['d']['results']]
    assert len(odata) == n
    return odata
def test_get_product_odata_scihub_down():
    """Server outages (503/502, or 200 with a non-JSON body) raise SentinelAPIError
    with the server's message."""
    api = SentinelAPI("mock_user", "mock_password")
    with requests_mock.mock() as rqst:
        # Plain 503 outage
        rqst.get(
            "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')?$format=json",
            text="Mock SciHub is Down", status_code=503)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        # 200 with a non-JSON body must also be treated as an error
        rqst.get(
            "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/?$format=json",
            text="Mock SciHub is Down", status_code=200)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert excinfo.value.msg == "Mock SciHub is Down"
        # Test with a real server response
        rqst.get(
            "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')?$format=json",
            text=textwrap.dedent("""\
            <!doctype html>
            <title>The Sentinels Scientific Data Hub</title>
            <link href='https://fonts.googleapis.com/css?family=Open+Sans' rel='stylesheet' type='text/css'>
            <style>
            body { text-align: center; padding: 125px; background: #fff;}
            h1 { font-size: 50px; }
            body { font: 20px 'Open Sans',Helvetica, sans-serif; color: #333; }
            article { display: block; text-align: left; width: 820px; margin: 0 auto; }
            a { color: #0062a4; text-decoration: none; font-size: 26px }
            a:hover { color: #1b99da; text-decoration: none; }
            </style>
            <article>
            <img alt="" src="/datahub.png" style="float: left;margin: 20px;">
            <h1>The Sentinels Scientific Data Hub will be back soon!</h1>
            <div style="margin-left: 145px;">
            <p>
            Sorry for the inconvenience,<br/>
            we're performing some maintenance at the moment.<br/>
            </p>
            <!--<p><a href="https://scihub.copernicus.eu/news/News00098">https://scihub.copernicus.eu/news/News00098</a></p>-->
            <p>
            We'll be back online shortly!
            </p>
            </div>
            </article>
            """),
            status_code=502)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        # The HTML body's headline is surfaced in the exception message.
        assert "The Sentinels Scientific Data Hub will be back soon!" in excinfo.value.msg
def test_too_long_query(api):
    """check_query_length estimates how close a query is to the server limit."""
    # Test whether our limit calculation is reasonably correct and
    # that a relevant error message is provided

    def create_query(n):
        # Build a quoted term whose length scales linearly with n.
        return '"' + " a_-.*:,?+~!" * n + '"'

    # Expect no error
    q = create_query(162)
    assert 0.99 < SentinelAPI.check_query_length(q) < 1.0
    count = api.count(raw=q)
    assert count == 0

    # Expect HTTP status 500 Internal Server Error
    q = create_query(163)
    assert 0.999 <= SentinelAPI.check_query_length(q) < 1.01
    with pytest.raises(QueryLengthError) as excinfo:
        api.count(raw=q)
    assert "x times the maximum allowed" in str(excinfo.value)
def find_s2_uuid(s2_filename):
    """
    Returns S2 uuid required for download via sentinelsat, based upon an input
    S2 file/scene name. I.e. S2A_MSIL1C_20180820T223011_N0206_R072_T60KWE_20180821T013410
    Assumes esa hub creds stored as env variables.

    Fixes: idiomatic `str.endswith` instead of slicing `[-5:] ==`, and an
    explicit `return None` for the non-'.SAFE' case (previously implicit).

    :param s2_filename: Sentinel-2 scene name
    :return s2_uuid: download id, or None when the name lacks a '.SAFE' suffix
    """
    copernicus_username = os.getenv("COPERNICUS_USERNAME")
    copernicus_pwd = os.getenv("COPERNICUS_PWD")
    print(f"ESA username: {copernicus_username}")
    esa_api = SentinelAPI(copernicus_username, copernicus_pwd)
    if s2_filename.endswith('.SAFE'):
        res = esa_api.query(filename=s2_filename)
        res = esa_api.to_geodataframe(res)
        # NOTE(review): raises IndexError if the query returns no match — confirm callers.
        return res.uuid.values[0]
    return None
def ndvihesaplama(request):
    """Query the least-cloudy Sentinel-2 product over the configured footprint,
    dump selected metadata fields to a text file, and render them.

    Fixes: the results text file is now read with a `with` block (it was
    previously opened and never closed), and the redundant `dosya.close()`
    inside the writer's `with` block was removed.
    """
    # NOTE(review): hard-coded credentials — move to settings/environment variables.
    api = SentinelAPI('flavves', 'BATUhan123.', 'https://scihub.copernicus.eu/dhus')
    footprint = geojson_to_wkt(read_geojson('media/map.geojson'))
    products = api.query(footprint,
                         date=('20191219', date(2019, 12, 29)),
                         platformname='Sentinel-2')

    # pandas dataframe yap (convert to a pandas DataFrame)
    products_df = api.to_dataframe(products)

    # filtreleme (sort by cloud cover, then ingestion date; keep the best row)
    products_df_sorted = products_df.sort_values(
        ['cloudcoverpercentage', 'ingestiondate'], ascending=[True, True])
    products_df_sorted = products_df_sorted.head(1)
    df = products_df_sorted
    NotDefteriKaydi = df.values.tolist()
    str_denemesi = str(NotDefteriKaydi)
    Kaydetmeye_basla = list(str_denemesi.split(","))
    # Column positions of interest in the stringified row.
    yerler = [0, 7, 8, 9, 12, 14, 18, 19, 20]
    isimler = [
        "Dosya adı:", "Uydu adı", "Dosya boyutu", "Çekim tarihi",
        "Orbit numarası", "Bulut", "vejetasyon", "su", "not vejetasyon"
    ]
    i = 0
    with open("media/books/txt/deneme.txt", "w") as dosya:
        for sira in yerler:
            print(isimler[i] + ":" + Kaydetmeye_basla[sira])
            yaz = (isimler[i] + ":" + Kaydetmeye_basla[sira])
            i = i + 1
            dosya.write(yaz)
            dosya.write("\n")

    file_path = ('media\\books\\txt\\deneme.txt')  # full path to text.
    with open(file_path, 'r') as data_file:
        data = data_file.read()
    data = list(data.split("\n"))
    context = {'deneme': data}
    return render(request, "todo_app/ndvihesaplama.html", context, backend().badana())
def test_footprints_s2(products):
    """Every returned footprint must be valid GeoJSON and the whole result
    must match the stored expected file (order-insensitively)."""
    footprints = SentinelAPI.to_geojson(products)
    for feature in footprints['features']:
        result = geojson.is_valid(feature['geometry'])
        assert result['valid'] == 'yes', result['message']

    with open('tests/expected_search_footprints_s2.geojson') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())

    # JSON objects are unordered, so compare as sets rather than lists.
    assert set(footprints) == set(expected_footprints)
def __init__(self, *users, **kwargs):
    """Create sentinel api instance.

    Two calling modes:
      * no positional ``users``: a single account is taken from the
        ``username``/``password`` keyword arguments (plus optional
        ``api_url``); ``parallel`` defaults to False.
      * one or more ``users`` dicts: the first dict supplies the
        credentials for the primary API client and parallel mode is
        forced on.

    Keyword args:
        progress (bool): show sentinelsat progress bars (default False).
        parallel (bool): enable the multi-client pool (default False;
            ignored/forced True when ``users`` are given).
    """
    users_context = list(users)
    show_progress = kwargs.get('progress', False)
    parallel = kwargs.get('parallel', False)
    self.progress = show_progress
    if not users:
        # Credential-in-kwargs mode: username/password are mandatory.
        if 'username' not in kwargs or 'password' not in kwargs:
            raise RuntimeError(
                'Missing "username"/"password" for USGS provider.')
        auth = kwargs
        self.parallel = parallel
        if parallel:
            # The single kwargs account also joins the parallel pool.
            users_context.append(auth)
        self.kwargs = kwargs
    else:
        # Explicit user-list mode always runs in parallel.
        self.parallel = True
        auth = users[0]
    options = dict()
    if auth.get('api_url'):
        options['api_url'] = auth['api_url']
    self.api = SentinelAPI(auth['username'], auth['password'],
                           show_progressbars=show_progress, **options)
    if self.parallel:
        # NOTE(review): UserClients presumably round-robins the accounts
        # in users_context — confirm against its definition.
        self.clients = UserClients(users_context)
    # Register the collection handlers this provider can serve.
    # self.collections is expected to exist on the (unseen) base class.
    self.collections['Sentinel-1'] = Sentinel1
    self.collections['GRD'] = Sentinel1
    self.collections['Sentinel-2'] = Sentinel2
    self.collections['S2MSI1C'] = Sentinel2
    self.collections['S2MSI2A'] = Sentinel2
def getscenes(): api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus') # download single scene by known product id #api.download(<product_id>) t0 = datetime.now() - timedelta(days=7) tf = datetime.now() # search by polygon, time, and SciHub query keywords footprint = geojson_to_wkt( read_geojson(home['parameters'] + '/extent_ce.geojson')) products = api.query(footprint, date=(date(t0.year, t0.month, t0.day), date(tf.year, tf.month, tf.day)), platformname='Sentinel-2', cloudcoverpercentage=(0, 20)) # download all results from the search #s2aIn = '/home/delgado/Documents/tmp' # in case you are just testing api.download_all(products, directory_path=s2aIn)
def test_download_all(tmpdir):
    """download_all() should fetch every product, and a checksum mismatch
    (simulated via a mocked OData response) must land in failed_downloads."""
    api = SentinelAPI(**_api_auth)
    # From https://scihub.copernicus.eu/apihub/odata/v1/Products?$top=5&$orderby=ContentLength
    # filenames = ["S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E",
    #              "S1A_WV_OCN__2SSV_20150526T211029_20150526T211737_006097_007E78_134A",
    #              "S1A_WV_OCN__2SSV_20150526T081641_20150526T082418_006090_007E3E_104C"]
    # Corresponding IDs
    ids = [
        "5618ce1b-923b-4df2-81d9-50b53e5aded9",
        "d8340134-878f-4891-ba4f-4df54f1e3ab4",
        "1f62a176-c980-41dc-b3a1-c735d660c910"
    ]

    # Download normally
    product_infos, failed_downloads = api.download_all(ids, str(tmpdir))
    assert len(failed_downloads) == 0
    assert len(product_infos) == len(ids)
    for product_id, product_info in product_infos.items():
        pypath = py.path.local(product_info['path'])
        assert pypath.check(exists=1, file=1)
        assert pypath.purebasename in product_info['title']
        assert pypath.size() == product_info["size"]

    # Force one download to fail by faking an impossible checksum.
    # (Renamed from `id`/`json` — those shadowed the builtin and the
    # stdlib module name.)
    uuid, product_info = list(product_infos.items())[0]
    path = product_info['path']
    py.path.local(path).remove()
    with requests_mock.mock(real_http=True) as rqst:
        url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('%s')?$format=json" % uuid
        odata_json = api.session.get(url).json()
        odata_json["d"]["Checksum"]["Value"] = "00000000000000000000000000000000"
        rqst.get(url, json=odata_json)
        product_infos, failed_downloads = api.download_all(ids, str(tmpdir),
                                                           max_attempts=1, checksum=True)
        assert len(failed_downloads) == 1
        assert len(product_infos) + len(failed_downloads) == len(ids)
        assert uuid in failed_downloads
    tmpdir.remove()
def download_products(targetfolder, user, password):
    """Query SciHub for Sentinel-2 L1C products over a fixed tile list and
    download everything found into *targetfolder*.

    The tile list is currently hard-coded; the password comes from the
    caller, the user name likewise.
    """
    api = SentinelAPI(user, password)

    # Tiles to fetch — can hold any number of tile ids.
    tiles = ['34PHV']

    # Core query keywords shared by every tile.
    query_kwargs = {
        'platformname': 'Sentinel-2',
        'producttype': 'S2MSI1C',
        'relativeorbitnumber': '064',
        'date': ('20180301', '20180310')}

    print('Query arguments are:')
    for key in query_kwargs:
        print(key, ': ', query_kwargs[key])

    # Accumulate per-tile query results into one ordered mapping.
    products = OrderedDict()

    # For each tile, extend a copy of the core keywords with the tile id
    # (tileid only works for products after 2017-03-31; use a filename
    # wildcard for older data) and merge the query result into `products`.
    for tile in tiles:
        per_tile_kwargs = query_kwargs.copy()
        # print(per_tile_kwargs)
        per_tile_kwargs['tileid'] = tile  # only works for products after 2017-03-31
        # per_tile_kwargs['filename'] = '*_{}_*'.format(tile)  # products after 2016-12-01
        products.update(api.query(**per_tile_kwargs))

    # Download everything that matched.
    print('Found', len(products), 'product(s).')
    api.download_all(products, directory_path=targetfolder)
    print("Downloaded " + str(len(products)) + " product(s).")
def get_products_aoi(extent_file, accounts_file, start_date, end_date):
    '''
    Creates an ordered dictionary of products that intersect with the extent
    of a raster file in a provided date interval

    @type extent_file: str
    @param extent_file: file path of the airborne data
    @type accounts_file: str
    @param accounts_file: file path of the accounts text file
    @type start_date: str or datetime
    @param start_date: beginning of period of interest
    @type end_date: str or datetime
    @param end_date: end of period of interest

    @rtype: Ordered Dictionary
    @return: products
    @raise ValueError: when no products match the search criteria
    '''
    # Set up credentials and the API client.
    credentials = account(accounts_file)
    # BUGFIX: dict.values() is a view in Python 3 and is not subscriptable;
    # materialize it before indexing.
    first_account = list(credentials.values())[0]
    api = SentinelAPI(first_account[0], first_account[1],
                      'https://scihub.copernicus.eu/dhus')

    # Get the extent and express it as a WKT polygon (closing the ring by
    # repeating the first point).
    AOI = get_extent(extent_file)
    points = []
    for elem in AOI['coordinates']:
        x, y = elem
        points += [str(round(x, 7)) + ' ' + str(round(y, 7))]
    AOI_wkt = 'POLYGON ((%s, %s, %s, %s, %s))' % (
        points[0], points[1], points[2], points[3], points[0])

    # Run the query.
    products = api.query(AOI_wkt, initial_date=start_date, end_date=end_date,
                         platformname='Sentinel-2')

    if len(products) == 0:
        raise ValueError('No products match the entered search criteria.')
    # BUGFIX: was a Python-2 print statement (SyntaxError on Python 3).
    print(str(len(products)) + ' match the search criteria.')
    return products, credentials
def download_extract_s2_esa(scene_uuid, down_dir, original_scene_dir):
    """
    Download a single S2 scene from ESA via sentinelsat based upon uuid.
    Assumes ESA hub creds stored as env variables
    (COPERNICUS_USERNAME / COPERNICUS_PWD).

    :param scene_uuid: S2 download uuid from sentinelsat query
    :param down_dir: directory in which to create a downloaded product dir
    :param original_scene_dir: expected path of the unzipped .SAFE/ directory
    :return:
    """
    # The zip lives next to the .SAFE/ directory; compute its path once.
    zip_path = original_scene_dir.replace('.SAFE/', '.zip')

    # If the unzipped .SAFE directory doesn't exist we must do something.
    if not os.path.exists(original_scene_dir):
        # If the downloaded .zip file doesn't exist either, download it.
        if not os.path.exists(zip_path):
            logging.info('Downloading ESA scene zip: {}'.format(
                os.path.basename(original_scene_dir)))
            copernicus_username = os.getenv("COPERNICUS_USERNAME")
            copernicus_pwd = os.getenv("COPERNICUS_PWD")
            logging.debug(f"ESA username: {copernicus_username}")
            esa_api = SentinelAPI(copernicus_username, copernicus_pwd)
            esa_api.download(scene_uuid, down_dir, checksum=True)

        # Extract the downloaded .zip file. Use a context manager so the
        # handle is closed even if extraction raises.
        logging.info('Extracting ESA scene: {}'.format(original_scene_dir))
        with zipfile.ZipFile(zip_path, 'r') as zip_ref:
            zip_ref.extractall(os.path.dirname(down_dir))
    else:
        logging.warning(
            'ESA scene already extracted: {}'.format(original_scene_dir))

    # Remove the zipped scene, but only once it has been unzipped.
    # (Was `&` — the bitwise operator; `and` short-circuits properly.)
    if os.path.exists(original_scene_dir) and os.path.exists(zip_path):
        logging.info('Deleting ESA scene zip: {}'.format(zip_path))
        os.remove(zip_path)
def _downloader(self, datelist, sensor, continent, obs_folder, obs_lst, uid, psw):
    """Fetch new Sentinel-3 SY_2_V10 products for a continent AOI.

    Queries SciHub over the given date span, drops observations already
    present locally, downloads the remainder (2 concurrent downloads) and
    unzips them into *obs_folder*.
    """
    already_have = self._available_dec(obs_lst)
    api = SentinelAPI(uid, psw)

    # Pick the AOI geojson matching the requested continent.
    if continent == 'Africa':
        footprint = geojson_to_wkt(
            read_geojson(r'L:/HSL/poi/AOI_Africa.geojson'))
    elif continent == 'West_Asia':
        footprint = geojson_to_wkt(
            read_geojson(r'L:/HSL/poi/AOI_West_Asia.geojson'))
    elif continent == 'Europe':
        footprint = geojson_to_wkt(
            read_geojson(r'L:/HSL/poi/AOI_Europe.geojson'))

    result = api.query(footprint,
                       filename=f'S3{sensor}_*',
                       producttype='SY_2_V10___',
                       date=(datelist[0], datelist[-1]))
    result_df = api.to_dataframe(result)

    # Nothing matched — nothing to do.
    if result_df.size == 0:
        return

    # Oldest-first by ingestion date, capped at 24 products per run.
    candidates = result_df.sort_values(['ingestiondate'],
                                       ascending=[True]).head(24)
    # Keep only observations we don't have yet, one per acquisition time.
    pending = candidates[~candidates['beginposition'].isin(already_have)] \
        .drop_duplicates(subset=['beginposition'], keep='last')

    if pending.size != 0:
        outcome = api.download_all(pending.index,
                                   directory_path=obs_folder,
                                   n_concurrent_dl=2)
        # Third element of the result tuple holds the failed downloads.
        if len(outcome[2]) != 0:
            print('Info ! Some dataset were not downloaded')
        self._unzipper(obs_folder)
def job():
    """Poll SciHub for a new Sentinel image over the configured AOI and
    e-mail its metadata when one is found, then sleep ~23 hours.

    Relies on module-level globals: oah_user/oah_pass, wkt, start_date,
    end_date, platformname, min_cloud, max_cloud, processinglevel,
    smtp_server, port, sender_email, receiver_email, password.
    (Original comments were in Hungarian; translated below.)
    """
    # Search SciHub for Sentinel imagery matching the search criteria.
    api = SentinelAPI(oah_user,oah_pass, 'https://apihub.copernicus.eu/apihub/')
    count=api.count(area=wkt, date=(start_date, end_date), platformname=platformname,area_relation='Contains',raw=None,cloudcoverpercentage=(min_cloud,max_cloud),limit=20, processinglevel = processinglevel)
    now = datetime.now()
    now = now.strftime("%d/%m/%Y %H:%M:%S")
    # "Searching for new data" status line.
    print(now+' - Új adat keresése')
    if count>0:
        # Write the metadata of the available images into a dataframe.
        products = api.query(area=wkt, date=(start_date, end_date), platformname=platformname,area_relation='Contains',raw=None,cloudcoverpercentage=(min_cloud,max_cloud),limit=20, processinglevel = processinglevel)
        products_df = api.to_dataframe(products)
        detail=products_df.iat[0,4]
        # Format the fields that go into the e-mail.
        # NOTE(review): the column positions below (36, 37, 4, 21) are
        # hard-coded against the sentinelsat dataframe layout — confirm
        # they still hold for the library version in use.
        img_sat=products_df.iloc[0,36]        # satellite name
        img_proc_lvl=products_df.iloc[0,37]   # processing level
        img_date=products_df.iloc[0,4][6:16]  # acquisition date
        img_time=products_df.iloc[0,4][17:25] # acquisition time
        img_cloud=str(products_df.iloc[0,21])[:5]+' %'  # cloud cover
        # Prepare the e-mail content (subject/body are Hungarian on purpose).
        subject="Új műholdkép - "+img_date
        body="A vizsgált területről készült új műholdkép adatai.\n\n"+'Műhold: '+img_sat+'\n'+'Feldolgozási szint: '+img_proc_lvl+'\n'+'Felvétel rögzítve: '+img_date+', '+img_time+'\n'+'Felvétel felhőzöttsége: '+img_cloud
        message=f'Subject:{subject}\n\n{body}'
        # Send the e-mail, then pause the polling loop.
        context = ssl.create_default_context()
        with smtplib.SMTP_SSL(smtp_server, port, context=context) as server:
            server.login(sender_email, password)
            server.sendmail(sender_email, receiver_email, message.encode("utf8"))
            now = datetime.now()
            now = now.strftime("%d/%m/%Y %H:%M:%S")
            # "E-mail sent" status line.
            print(now+' - E-mail elküldve')
            time.sleep(82800) # 23 hours
            return
    else:
        # Report when no new image is available.
        print(now+' - Nem érhető el új műholdkép')
        return