Example #1
def test_scihub_unresponsive():
    timeout_connect = 6
    timeout_read = 6.6
    timeout = (timeout_connect, timeout_read)

    api = SentinelAPI("mock_user", "mock_password", timeout=timeout)

    with requests_mock.mock() as rqst:
        rqst.request(requests_mock.ANY, requests_mock.ANY, exc=requests.exceptions.ConnectTimeout)
        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.query(**_small_query)

        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.download('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.download_all(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])

    with requests_mock.mock() as rqst:
        rqst.request(requests_mock.ANY, requests_mock.ANY, exc=requests.exceptions.ReadTimeout)
        with pytest.raises(requests.exceptions.ReadTimeout):
            api.query(**_small_query)

        with pytest.raises(requests.exceptions.ReadTimeout):
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ReadTimeout):
            api.download('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ReadTimeout):
            api.download_all(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])
Example #2
def downloadingData(aoi, collectionDate, plName, prLevel, clouds, username,
                    password, directory):
    '''
    Downloads the Sentinel-2 data with the given parameters

    Parameters:
        aoi (str): The type and the coordinates of the area of interest
        collectionDate (datetime64[ns]): The date of the data
        plName (str): The name of the platform
        prLevel (str): The name of the processing level
        clouds (tuple of ints): Min and max of cloudcoverpercentage
        username (str): The username of the Copernicus SciHub
        password (str): The password of the Copernicus SciHub
        directory (str): Pathlike string to the directory
    '''

    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')
    # Choose the data by bounding box (footprint), date, platformname, processinglevel and cloudcoverpercentage
    products = api.query(aoi,
                         date=collectionDate,
                         platformname=plName,
                         processinglevel=prLevel,
                         cloudcoverpercentage=clouds)
    # Download the chosen files from SciHub
    if len(products) == 0:
        raise Exception("No data found for these parameters")
    print("Start downloading " + str(len(products)) + " product(s)")
    api.download_all(products, directory, max_attempts=10, checksum=True)
    print("All necessary downloads done")
Example #3
def test_download_all(tmpdir):
    api = SentinelAPI(**_api_auth)
    # From https://scihub.copernicus.eu/apihub/odata/v1/Products?$top=5&$orderby=ContentLength
    filenames = ["S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E",
                 "S1A_WV_OCN__2SSV_20150526T211029_20150526T211737_006097_007E78_134A",
                 "S1A_WV_OCN__2SSV_20150526T081641_20150526T082418_006090_007E3E_104C"]

    ids = list(api.query_raw(" OR ".join(filenames)))
    assert len(ids) == len(filenames)

    # Download normally
    product_infos, failed_downloads = api.download_all(ids, str(tmpdir))
    assert len(failed_downloads) == 0
    assert len(product_infos) == len(filenames)
    for product_id, product_info in product_infos.items():
        pypath = py.path.local(product_info['path'])
        assert pypath.purebasename in filenames
        assert pypath.check(exists=1, file=1)
        assert pypath.size() == product_info["size"]

    # Force one download to fail
    id, product_info = list(product_infos.items())[0]
    path = product_info['path']
    py.path.local(path).remove()
    with requests_mock.mock(real_http=True) as rqst:
        url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('%s')?$format=json" % id
        json = api.session.get(url).json()
        json["d"]["Checksum"]["Value"] = "00000000000000000000000000000000"
        rqst.get(url, json=json)
        product_infos, failed_downloads = api.download_all(
            ids, str(tmpdir), max_attempts=1, checksum=True)
        assert len(failed_downloads) == 1
        assert len(product_infos) + len(failed_downloads) == len(filenames)
        assert id in failed_downloads
Example #4
def get_scenes():
    logger = logging.getLogger('root')

    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')
    logging.info(api.api_url)
    # download single scene by known product id
    #api.download(<product_id>)
    tf = datetime.now()
    # tf=datetime(2018,1,10)
    t0 = tf - timedelta(days=7)

    # get_s1_orbits(t0,tf)

    # search by polygon, time, and SciHub query keywords
    footprint = geojson_to_wkt(
        read_geojson(home['parameters'] + '/extent_' + location['region'] +
                     '.geojson'))

    products_s1a = api.query(footprint,
                             date=(date(t0.year, t0.month,
                                        t0.day), date(tf.year, tf.month,
                                                      tf.day)),
                             producttype="GRD",
                             platformname='Sentinel-1')

    for item in products_s1a:
        logging.info(products_s1a[item]['title'])

    # download all results from the search
    # already downloaded files are skipped
    api.download_all(products_s1a, directory_path=sarIn)
Example #5
def getscenes_test_dataset():
    logger = logging.getLogger('root')

    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')
    logging.info(api.api_url)
    # download single scene by known product id
    #api.download(<product_id>)
    t0 = datetime.now() - timedelta(days=8 * 30)  # approx. 8 months; timedelta() has no 'months' argument
    tf = datetime.now()
    # search by polygon, time, and SciHub query keywords
    footprint = geojson_to_wkt(
        read_geojson(home['parameters'] + '/madalena.geojson'))

    products_s1a = api.query(footprint,
                             date=(date(t0.year, t0.month,
                                        t0.day), date(tf.year, tf.month,
                                                      tf.day)),
                             producttype="GRD",
                             platformname='Sentinel-1')
    for item in products_s1a:
        logging.info(products_s1a[item]['title'])

    tests1In = home['scratch'] + "/test_dataset/s1a_scenes/in"

    # download all results from the search
    api.download_all(products_s1a, directory_path=tests1In)
Example #6
def downloadingData(aoi, collectionDate, plName, prLevel, clouds, username,
                    password, directory):
    '''
    Downloads the Sentinel-2 data with the given parameters

    Parameters:
        aoi (str): The type and the coordinates of the area of interest
        collectionDate (datetime64[ns]): The date of the data
        plName (str): The name of the platform
        prLevel (str): The name of the processing level
        clouds (tuple of ints): Min and max of cloudcoverpercentage
        username (str): The username of the Copernicus SciHub
        password (str): The password of the Copernicus SciHub
        directory (str): Pathlike string to the directory
    '''

    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')
    # Choose the data by bounding box (footprint), date, platformname, processinglevel and cloudcoverpercentage
    products = api.query(aoi,
                         date=collectionDate,
                         platformname=plName,
                         processinglevel=prLevel,
                         cloudcoverpercentage=clouds)
    # Filter the products and sort by cloudcoverpercentage
    products_gdf = api.to_geodataframe(products)
    products_gdf_sorted = products_gdf.sort_values(['cloudcoverpercentage'],
                                                   ascending=[True])
    # Write the sorted product list to a CSV and download the chosen files from SciHub
    # ('products.csv' is an assumed filename; the original passed the literal name 'w')
    products_gdf_sorted.to_csv(os.path.join(directory, 'products.csv'))
    api.download_all(products, directory, max_attempts=10, checksum=True)
Example #7
def test_skip_products(number_to_skip):
    """ Test if skipping works correctly with mixed list of uuid, pdf, xml
    uuid: (5618ce1b-923b-4df2-81d9-50b53e5aded9) S1A_WV_OCN__2SSV_20150526T081641_20150526T082418_006090_007E3E_104C
    pdf:  (1f62a176-c980-41dc-b3a1-c735d660c910) S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E
    xml:  (c8f3acd1-67da-4d9f-bd42-86278e3b46a8) S2A_OPER_PRD_MSIL1C_PDMC_20161013T075059_R111_V20161012T161812_20161012T161807
    """
    api = SentinelAPI(**_api_auth)
    ids = [
        "5618ce1b-923b-4df2-81d9-50b53e5aded9",
        "1f62a176-c980-41dc-b3a1-c735d660c910",
        "c8f3acd1-67da-4d9f-bd42-86278e3b46a8"
    ]

    skip_products = [
        "5618ce1b-923b-4df2-81d9-50b53e5aded9",
        path.join(FIXTURES_DIR, "title_pdf"),
        path.join(FIXTURES_DIR, "title_xml")
    ]

    product_infos, _ = api.download_all(
        ids,
        path.join(FIXTURES_DIR, "vcr_cassettes"),
        skip_products=skip_products[0:number_to_skip])

    assert 3 - number_to_skip == len(product_infos)
Example #8
def test_scihub_unresponsive():
    api = SentinelAPI("mock_user", "mock_password")

    with requests_mock.mock() as rqst:
        rqst.request(requests_mock.ANY, requests_mock.ANY, exc=requests.exceptions.ConnectTimeout)
        with pytest.raises(requests.exceptions.Timeout) as excinfo:
            api.query(**_small_query)

        with pytest.raises(requests.exceptions.Timeout) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.Timeout) as excinfo:
            api.download('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.Timeout) as excinfo:
            api.download_all(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])
Example #9
def test_SentinelAPI_wrong_credentials():
    api = SentinelAPI("wrong_user", "wrong_password")
    with pytest.raises(SentinelAPIError) as excinfo:
        api.query(**_small_query)
    assert excinfo.value.response.status_code == 401

    with pytest.raises(SentinelAPIError) as excinfo:
        api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
    assert excinfo.value.response.status_code == 401

    with pytest.raises(SentinelAPIError) as excinfo:
        api.download('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
    assert excinfo.value.response.status_code == 401

    with pytest.raises(SentinelAPIError) as excinfo:
        api.download_all(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])
    assert excinfo.value.response.status_code == 401
Example #10
class Download_Sentinel:
    def __init__(self,
                 username,
                 password,
                 geo_json,
                 platform_name='Sentinel-2',
                 processinglevel='Level-2A',
                 date_s='NOW-3DAYS',
                 date_e='NOW',
                 cloud=(0, 5)):
        """

        Parameters
        ----------
        username : String
            Copernicus Scihub username.
        password : String
            Copernicus Scihub password.
        geo_json : String
            geo_json path.
        platform_name : String, optional
            DESCRIPTION. The default is 'Sentinel-2'.
        processinglevel : String, optional
            DESCRIPTION. The default is 'Level-2A'.
        date_s : String, optional
            DESCRIPTION. The default is 'NOW-3DAYS'.
        date_e : String, optional
            DESCRIPTION. The default is 'NOW'.
        cloud : Tuple, optional
            DESCRIPTION. The default is (0, 5).

        """

        self.platform_name = platform_name
        self.processinglevel = processinglevel
        self.date_s = date_s
        self.date_e = date_e
        self.cloud = cloud
        self.json = geojson_to_wkt(read_geojson(geo_json))
        self.api = SentinelAPI(username, password)
        self.run()

    def query(self):

        return self.api.query(
            self.json,
            platformname=self.platform_name,
            processinglevel=self.processinglevel,
            date=(self.date_s, self.date_e),
            cloudcoverpercentage=self.cloud,
        )

    def download(self):

        return self.api.download_all(self.query())

    def run(self):
        self.download()
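A minimal instantiation sketch for Download_Sentinel; the credentials and GeoJSON path are placeholders. Note that __init__ calls run(), so constructing the object already triggers the query and download.

Download_Sentinel('my_user', 'my_password', 'aoi.geojson',  # placeholder credentials and AOI file
                  platform_name='Sentinel-2',
                  processinglevel='Level-2A',
                  date_s='NOW-14DAYS',
                  date_e='NOW',
                  cloud=(0, 10))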
Example #11
class sentinelWrapper:
    def __init__(self):

        logger.info("connect to sentinel API")

        # connection to API for search queries and download requests
        self.api = SentinelAPI(config.copernicusUser, config.copernicusPW,
                               config.copernicusURL)

        logger.info("sentinel API connected")

    def getSentinelProducts(self, lat, lon, dateFrom, dateTo, platform,
                            **kwargs):

        logger.info("start sentinel query")

        # convert geolocation coordinates to wkt format
        footprint = geojson_to_wkt(Point((lon, lat)))

        # prepare parameter for cloud coverage
        if "cloudcoverpercentage" in kwargs:
            kwargs["cloudcoverpercentage"] = (0,
                                              kwargs["cloudcoverpercentage"])

        # search query
        result = self.api.query(footprint,
                                date=(dateFrom, dateTo),
                                platformname=platform,
                                **kwargs)

        logger.info("sentinel query complete")

        return result

    # download multiple sentinel products (list of product IDs)
    def downloadSentinelProducts(self, products):
        logger.info("start downloading sentinel product list")
        self.api.download_all(products, config.bigTilesDir)
        logger.info("download complete")

    # download sentinel product with certain product ID
    def downloadSentinelProduct(self, productID):
        logger.info("start downloading sentinel product")
        self.api.download(productID, config.bigTilesDir)
        logger.info("download complete")
Example #12
def sat_downloader(userid, password, allcities_list):

    # Connect to the Copernicus S5P hub
    apiurl = 'https://s5phub.copernicus.eu/dhus'
    testAPI = SentinelAPI(user=userid, password=password, api_url=apiurl)

    dir_path = os.path.dirname(os.path.realpath(__file__))

    # For all cities
    for precity in allcities_list:

        city = precity[0]

        # Determine the GPS location needed to search
        geolocator = Nominatim(user_agent="testApp")
        targetcity = geolocator.geocode(city)

        timeframes = [
            'beginposition:[NOW-1DAYS TO NOW]',
            'beginposition:[2020-02-01T00:00:00.000Z TO 2020-02-02T00:00:00.000Z]',
            'beginposition:[2020-01-05T00:00:00.000Z TO 2020-01-06T00:00:00.000Z]'
        ]

        # Determine what files meet the criteria specified
        #timeframe = 'beginposition:[NOW-1DAYS TO NOW]'
        #timeframe = 'beginposition:[2020-02-01T00:00:00.000Z TO 2020-02-02T00:00:00.000Z]'
        #timeframe = 'beginposition:[2020-01-05T00:00:00.000Z TO 2020-01-06T00:00:00.000Z]'

        for timeframe in timeframes:

            satquery_loc = 'footprint:"intersects(' + str(
                targetcity.latitude) + ',' + str(targetcity.longitude) + ')"'
            products = testAPI.query(raw=satquery_loc + ' AND ' + timeframe +
                                     ' AND producttype:L2__NO2___')

            if not products:
                print('Dictionary empty')
            else:

                # Based on that, generate paths of available data
                downloadedfile = products[next(iter(products))]['filename']
                datafilesfolder = r'\\datafiles\\'
                downloadedfile_full = dir_path + datafilesfolder + downloadedfile
                firstdownload = dir_path + datafilesfolder.replace(
                    r'datafiles\\', '') + downloadedfile.replace(
                        '.nc', '.zip')

                # Check to see if you have already downloaded this file
                if os.path.exists(downloadedfile_full):
                    #Exists!
                    print('File exists.. skipping')
                else:
                    # Otherwise, download all results from the search
                    mypath = testAPI.download_all(products)

                    # Move the file to where it's supposed to go
                    os.rename(firstdownload, downloadedfile_full)
Example #13
def test_SentinelAPI_wrong_credentials(small_query):
    api = SentinelAPI("wrong_user", "wrong_password")

    @contextmanager
    def assert_exception():
        with pytest.raises(UnauthorizedError) as excinfo:
            yield
        assert excinfo.value.response.status_code == 401
        assert "Invalid user name or password" in excinfo.value.msg

    with assert_exception():
        api.query(**small_query)
    with assert_exception():
        api.get_product_odata("8df46c9e-a20c-43db-a19a-4240c2ed3b8b")
    with assert_exception():
        api.download("8df46c9e-a20c-43db-a19a-4240c2ed3b8b")
    with assert_exception():
        api.download_all(["8df46c9e-a20c-43db-a19a-4240c2ed3b8b"])
Example #14
def getscenes():
    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')

    # download single scene by known product id
    #api.download(<product_id>)
    t0 = datetime.now() - timedelta(days=7)
    tf = datetime.now()
    # search by polygon, time, and SciHub query keywords
    footprint = geojson_to_wkt(
        read_geojson(home['parameters'] + '/extent_ce.geojson'))
    products = api.query(footprint,
                         date=(date(t0.year, t0.month,
                                    t0.day), date(tf.year, tf.month, tf.day)),
                         platformname='Sentinel-2',
                         cloudcoverpercentage=(0, 20))

    # download all results from the search
    #s2aIn = '/home/delgado/Documents/tmp' # in case you are just testing
    api.download_all(products, directory_path=s2aIn)
Example #15
def search_download_sen2_data(user, password, area_polygon, datum, cloudcover,
                              downloadpath):
    from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt
    import zipfile
    import os
    import shutil
    import geopandas as gpd
    # connect to API
    api = SentinelAPI(user,
                      password,
                      api_url="https://scihub.copernicus.eu/apihub/")

    #### Test
    outlines_geojson = gpd.read_file("outlines.shp")

    # Avoid Fiona Bug https://github.com/Toblerity/Fiona/issues/438
    try:
        os.remove("outline.geojson")
    except OSError:
        pass
    outlines_geojson.to_file("outline.geojson", driver="GeoJSON")
    area_polygon = "outline.geojson"
    ##### End test

    # Search for products matching query
    products = api.query(area=geojson_to_wkt(read_geojson(area_polygon)),
                         date=datum,
                         platformname="Sentinel-2",
                         producttype="S2MSI1C",
                         cloudcoverpercentage=cloudcover)

    # count number of products matching query
    print(
        "Tiles found:",
        api.count(area=geojson_to_wkt(read_geojson(area_polygon)),
                  date=datum,
                  platformname="Sentinel-2",
                  producttype="S2MSI1C",
                  cloudcoverpercentage=cloudcover), ", Total size: ",
        api.get_products_size(products), "GB. Now downloading those tiles")

    # downloading all products
    download_zip = api.download_all(products, directory_path=downloadpath)

    # Unzip files, delete

    for key in download_zip[0].keys():
        with zipfile.ZipFile(download_zip[0][key]['path']) as zip_file:
            for member in zip_file.namelist():
                filename = os.path.basename(member)
                if not filename:
                    continue
                # Copy each archive member flat into the download directory
                # (the original opened members without actually extracting them)
                with zip_file.open(member) as source, \
                        open(os.path.join(downloadpath, filename), "wb") as target:
                    shutil.copyfileobj(source, target)

        os.remove(download_zip[0][key]['path'])
Example #16
def test_download_all(tmpdir):
    api = SentinelAPI(**_api_auth)
    # From https://scihub.copernicus.eu/apihub/odata/v1/Products?$top=5&$orderby=ContentLength
    # filenames = ["S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E",
    #              "S1A_WV_OCN__2SSV_20150526T211029_20150526T211737_006097_007E78_134A",
    #              "S1A_WV_OCN__2SSV_20150526T081641_20150526T082418_006090_007E3E_104C"]

    # Corresponding IDs
    ids = [
        "5618ce1b-923b-4df2-81d9-50b53e5aded9",
        "d8340134-878f-4891-ba4f-4df54f1e3ab4",
        "1f62a176-c980-41dc-b3a1-c735d660c910"
    ]

    # Download normally
    product_infos, failed_downloads = api.download_all(ids, str(tmpdir))
    assert len(failed_downloads) == 0
    assert len(product_infos) == len(ids)
    for product_id, product_info in product_infos.items():
        pypath = py.path.local(product_info['path'])
        assert pypath.check(exists=1, file=1)
        assert pypath.purebasename in product_info['title']
        assert pypath.size() == product_info["size"]

    # Force one download to fail
    id, product_info = list(product_infos.items())[0]
    path = product_info['path']
    py.path.local(path).remove()
    with requests_mock.mock(real_http=True) as rqst:
        url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('%s')?$format=json" % id
        json = api.session.get(url).json()
        json["d"]["Checksum"]["Value"] = "00000000000000000000000000000000"
        rqst.get(url, json=json)
        product_infos, failed_downloads = api.download_all(ids,
                                                           str(tmpdir),
                                                           max_attempts=1,
                                                           checksum=True)
        assert len(failed_downloads) == 1
        assert len(product_infos) + len(failed_downloads) == len(ids)
        assert id in failed_downloads

    tmpdir.remove()
Example #17
def download_products(targetfolder, user, password):


	# Create query and download files #

	# user name and password are passed in as function arguments (e.g. from the command line)
	api = SentinelAPI(user, password)

	# can be a list of tile ids
	tiles = ['34PHV']


	# create dictionary with core keywords
	query_kwargs = {
			'platformname': 'Sentinel-2',
			'producttype': 'S2MSI1C',
			'relativeorbitnumber': '064',
			'date': ('20180301', '20180310')}

	print('Query arguments are:')
	for i in query_kwargs:
		print(i, ': ', query_kwargs[i])

	# create empty ordered dictionary in which to insert query result for each tile
	products = OrderedDict()

	# for each tile in 'tiles', copy the dictionary of core keywords from above (without tiles), insert the corresponding
	# tile id (or filename for data before Apr 2017),
	# then apply query using the modified dictionary, and finally update 'products' (appending query result)
	for tile in tiles:
		kw = query_kwargs.copy()
		# print(kw)
		kw['tileid'] = tile  # only works for products after 2017-03-31
		# kw['filename'] = '*_{}_*'.format(tile)  # products after 2016-12-01
		pp = api.query(**kw)
		products.update(pp)

	# download selected products
	print('Found', len(products), 'product(s).')
	api.download_all(products, directory_path = targetfolder)

	print("Downloaded " + str(len(products)) + " product(s).")
Example #18
def test_SentinelAPI_wrong_credentials():
    api = SentinelAPI(
        "wrong_user",
        "wrong_password"
    )
    with pytest.raises(SentinelAPIError) as excinfo:
        api.query(**_small_query)
    assert excinfo.value.response.status_code == 401

    with pytest.raises(SentinelAPIError) as excinfo:
        api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
    assert excinfo.value.response.status_code == 401

    with pytest.raises(SentinelAPIError) as excinfo:
        api.download('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
    assert excinfo.value.response.status_code == 401

    with pytest.raises(SentinelAPIError) as excinfo:
        api.download_all(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])
    assert excinfo.value.response.status_code == 401
Example #19
def test_download_all(tmpdir):
    api = SentinelAPI(**_api_auth)
    # From https://scihub.copernicus.eu/apihub/odata/v1/Products?$top=5&$orderby=ContentLength
    # filenames = ["S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E",
    #              "S1A_WV_OCN__2SSV_20150526T211029_20150526T211737_006097_007E78_134A",
    #              "S1A_WV_OCN__2SSV_20150526T081641_20150526T082418_006090_007E3E_104C"]

    # Corresponding IDs
    ids = [
        "5618ce1b-923b-4df2-81d9-50b53e5aded9",
        "d8340134-878f-4891-ba4f-4df54f1e3ab4",
        "1f62a176-c980-41dc-b3a1-c735d660c910"
    ]

    # Download normally
    product_infos, failed_downloads = api.download_all(ids, str(tmpdir))
    assert len(failed_downloads) == 0
    assert len(product_infos) == len(ids)
    for product_id, product_info in product_infos.items():
        pypath = py.path.local(product_info['path'])
        assert pypath.check(exists=1, file=1)
        assert pypath.purebasename in product_info['title']
        assert pypath.size() == product_info["size"]

    # Force one download to fail
    id, product_info = list(product_infos.items())[0]
    path = product_info['path']
    py.path.local(path).remove()
    with requests_mock.mock(real_http=True) as rqst:
        url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('%s')?$format=json" % id
        json = api.session.get(url).json()
        json["d"]["Checksum"]["Value"] = "00000000000000000000000000000000"
        rqst.get(url, json=json)
        product_infos, failed_downloads = api.download_all(
            ids, str(tmpdir), max_attempts=1, checksum=True)
        assert len(failed_downloads) == 1
        assert len(product_infos) + len(failed_downloads) == len(ids)
        assert id in failed_downloads

    tmpdir.remove()
Example #20
class Downloader:
    def __init__(self, str_username, str_password, str_link):
        self.api = SentinelAPI(str_username, str_password, str_link)
        self.products = None

    def search_polygon(self, footprint: object, str_date_start: str,
                       str_date_end: str, str_platform_name: str, percentage: object):
        print('searching')
        self.products = self.api.query(footprint,
                                       date=(str_date_start, str_date_end),
                                       platformname=str_platform_name,
                                       cloudcoverpercentage=(percentage[0], percentage[1]))
        size = self.api.get_products_size(self.products)
        print(f'found {size} GB of data')
        # print(self.products)

    def download_zip(self, path):
        self.api.download_all(self.products, path, max_attempts=10, checksum=True)  # 'max_attempt' was undefined; 10 retries is an assumed default

    def download_products(self, path, download_file):
        if download_file:
            self.download_zip(path)
        print('downloaded')
        df_products = self.api.to_dataframe(self.products)
        return df_products

    def download_geoproduct(self, path, download_file):
        if download_file:
            self.download_zip(path)
        # print('download Geos')
        gdf_products = self.api.to_geodataframe(self.products)
        return gdf_products

    def download_json(self):
        return self.api.to_geojson(self.products)

    def download_one(self, key, path):
        self.api.download(key, path, True)
Example #21
def test_download_all(tmpdir):
    api = SentinelAPI(**_api_auth)
    # From https://scihub.copernicus.eu/apihub/odata/v1/Products?$top=5&$orderby=ContentLength
    filenames = [
        "S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E",
        "S1A_WV_OCN__2SSV_20150526T211029_20150526T211737_006097_007E78_134A",
        "S1A_WV_OCN__2SSV_20150526T081641_20150526T082418_006090_007E3E_104C"
    ]

    ids = list(api.query_raw(" OR ".join(filenames)))
    assert len(ids) == len(filenames)

    # Download normally
    product_infos, failed_downloads = api.download_all(ids, str(tmpdir))
    assert len(failed_downloads) == 0
    assert len(product_infos) == len(filenames)
    for product_id, product_info in product_infos.items():
        pypath = py.path.local(product_info['path'])
        assert pypath.purebasename in filenames
        assert pypath.check(exists=1, file=1)
        assert pypath.size() == product_info["size"]

    # Force one download to fail
    id, product_info = list(product_infos.items())[0]
    path = product_info['path']
    py.path.local(path).remove()
    with requests_mock.mock(real_http=True) as rqst:
        url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('%s')?$format=json" % id
        json = api.session.get(url).json()
        json["d"]["Checksum"]["Value"] = "00000000000000000000000000000000"
        rqst.get(url, json=json)
        product_infos, failed_downloads = api.download_all(ids,
                                                           str(tmpdir),
                                                           max_attempts=1,
                                                           checksum=True)
        assert len(failed_downloads) == 1
        assert len(product_infos) + len(failed_downloads) == len(filenames)
        assert id in failed_downloads
Example #22
def test_scihub_unresponsive(small_query):
    timeout_connect = 6
    timeout_read = 6.6
    timeout = (timeout_connect, timeout_read)

    api = SentinelAPI("mock_user", "mock_password", timeout=timeout)

    with requests_mock.mock() as rqst:
        rqst.request(requests_mock.ANY,
                     requests_mock.ANY,
                     exc=requests.exceptions.ConnectTimeout)
        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.query(**small_query)

        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.download('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.download_all(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])

    with requests_mock.mock() as rqst:
        rqst.request(requests_mock.ANY,
                     requests_mock.ANY,
                     exc=requests.exceptions.ReadTimeout)
        with pytest.raises(requests.exceptions.ReadTimeout):
            api.query(**small_query)

        with pytest.raises(requests.exceptions.ReadTimeout):
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ReadTimeout):
            api.download('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ReadTimeout):
            api.download_all(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])
Example #23
class Sentinel5P(API):
    CONFIG_PATH = "./config/config.json"

    def __init__(self, config_name="SentinelAPI"):
        self._config = load_config(Sentinel5P.CONFIG_PATH, config_name)
        self._defaults = self._config['default']
        self._api_link = self._config['api_link']
        user, password = get_credentials(self._config)
        self._api = SentinelAPI(user, password, self._api_link)

    def download(self, 
                download_path,
                area_gjson=None, 
                date_from=None, 
                date_to=None, 
                platform_name=None, 
                product_type=None):
        area = API.get_default_if_empty(area_gjson, self._defaults['area'])
        products = self._api.query(
            geojson_to_wkt(area) if area is not None else None,
            date=self._parse_date(
                API.get_default_if_empty(date_from, self._defaults['time']['start']), 
                API.get_default_if_empty(date_to, self._defaults['time']['end'])
            ),
            platformname=API.get_default_if_empty(platform_name, self._defaults['platform_name']),
            producttype=API.get_default_if_empty(product_type, self._defaults['product_type'])
        )
        self._api.download_all(products, directory_path=download_path)

    def _parse_date(self, date_from, date_to):
        return (date_from.replace('-', ''), date_to.replace('-', ''))

    def get_info(self):
        return {
            "config": self._config,
            "api_link": self._api_link
        }
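The layout of ./config/config.json is not shown in the snippet; the sketch below (written as a Python dict for illustration) is one plausible shape, inferred from the keys the class reads. All key names and values beyond 'default' and 'api_link' are assumptions, since they depend on load_config() and get_credentials().

ASSUMED_CONFIG = {
    "SentinelAPI": {
        "api_link": "https://s5phub.copernicus.eu/dhus",    # placeholder hub URL
        "user": "my_user",                                  # placeholder credential key
        "password": "my_password",                          # placeholder credential key
        "default": {
            "area": None,                                   # GeoJSON dict or None
            "time": {"start": "2020-06-01", "end": "2020-06-30"},
            "platform_name": "Sentinel-5 Precursor",
            "product_type": "L2__NO2___"
        }
    }
}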
Example #24
def inicio():
    if request.method == 'POST':
        pac = request.form

        # Setting directories
        fullpath = os.path.join(ROOTDIR, pac['project'])
        if not os.path.exists(fullpath):
            os.makedirs(fullpath)
        for sdir in DIRS:
            if not os.path.exists(os.path.join(fullpath, sdir)):
                os.makedirs(os.path.join(fullpath, sdir))

        # Setting square of interest, based on 4 coordinates
        line = ogr.Geometry(ogr.wkbLinearRing)
        poly = ogr.Geometry(ogr.wkbPolygon)
        coord = list(
            zip([pac['west']] * 2 + [pac['east']] * 2,
                [pac['north'], pac['south']] + [pac['south'], pac['north']]))
        coord.append(coord[0])
        for i in coord:
            line.AddPoint(float(i[0]), float(i[1]))
        poly.AddGeometry(line)

        # Setting api, query, and download
        api = SentinelAPI('aparedes', 'N1E3I3ipjdm5FEV8oaQN',
                          'https://scihub.copernicus.eu/dhus')
        filesd = {}
        failed = {}
        date = (''.join(pac['start_date'].split("-")),
                ''.join(pac['end_date'].split("-")))
        products = api.query(
            poly.ExportToWkt(),
            date,
            platformname='Sentinel-2',
            cloudcoverpercentage=(0, int(pac['max_cloud'])),
            producttype='S2MSI1C'
        )  # L1C products are available the same day; L2A arrives as post-processing, ~48 hours later
        productos = []
        ids = []
        down_size = sum(
            [float(v['size'].split(' ')[0]) for k, v in products.items()])
        productos.append([(v['filename']) for k, v in products.items()])
        ids.append([(k) for k, v in products.items()])
        filesd, triggered, failed = api.download_all(
            products, directory_path=os.path.join(fullpath, DIRS[0]))
        return 'Downloaded {} products, {} MB in total.'.format(
            len(products), down_size)
    else:
        return flask.render_template('sentinel_selection.html')
Example #25
def test_skip_products(number_to_skip):
    """ Test if skipping works correctly with mixed list of uuid, pdf, xml
    uuid: (5618ce1b-923b-4df2-81d9-50b53e5aded9) S1A_WV_OCN__2SSV_20150526T081641_20150526T082418_006090_007E3E_104C
    pdf:  (1f62a176-c980-41dc-b3a1-c735d660c910) S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E
    xml:  (c8f3acd1-67da-4d9f-bd42-86278e3b46a8) S2A_OPER_PRD_MSIL1C_PDMC_20161013T075059_R111_V20161012T161812_20161012T161807
    """
    api = SentinelAPI(**_api_auth)
    ids = ["5618ce1b-923b-4df2-81d9-50b53e5aded9",
           "1f62a176-c980-41dc-b3a1-c735d660c910",
           "c8f3acd1-67da-4d9f-bd42-86278e3b46a8"]

    skip_products = ["5618ce1b-923b-4df2-81d9-50b53e5aded9",
                     path.join(FIXTURES_DIR, "title_pdf"),
                     path.join(FIXTURES_DIR, "title_xml")]

    product_infos, _ = api.download_all(ids, path.join(FIXTURES_DIR, "vcr_cassettes"),
                                        skip_products=skip_products[0:number_to_skip])

    assert 3 - number_to_skip == len(product_infos)
Example #26
    def _downloader(self, datelist, sensor, continent, obs_folder, obs_lst,
                    uid, psw):
        local_obs_date = self._available_dec(obs_lst)

        api = SentinelAPI(uid, psw)

        if continent == 'Africa':
            footprint = geojson_to_wkt(
                read_geojson(r'L:/HSL/poi/AOI_Africa.geojson'))
        elif continent == 'West_Asia':
            footprint = geojson_to_wkt(
                read_geojson(r'L:/HSL/poi/AOI_West_Asia.geojson'))
        elif continent == 'Europe':
            footprint = geojson_to_wkt(
                read_geojson(r'L:/HSL/poi/AOI_Europe.geojson'))

        products = api.query(footprint,
                             filename=f'S3{sensor}_*',
                             producttype='SY_2_V10___',
                             date=(datelist[0], datelist[-1]))
        products_df = api.to_dataframe(products)

        if products_df.size == 0:
            return  # no matching products; nothing to download

        products_df_sorted = products_df.sort_values(['ingestiondate'],
                                                     ascending=[True])
        products_df_sorted = products_df_sorted.head(24)

        download_list = products_df_sorted[
            ~products_df_sorted['beginposition'].
            isin(local_obs_date)].drop_duplicates(subset=['beginposition'],
                                                  keep='last')

        if download_list.size != 0:
            downloaded = api.download_all(download_list.index,
                                          directory_path=obs_folder,
                                          n_concurrent_dl=2)
            if len(downloaded[2]) != 0:
                print('Info! Some datasets were not downloaded')
            self._unzipper(obs_folder)
        else:
            pass
Example #27
def downloadProductSentinel(product, dest_dir, remove_zip=False):
    '''
    Download and extract a Sentinel product zip file.

    Inputs: 
    ------
    product: must be a product identifier, like '93b051ba-504c-4746-8f74-c422187d33aa'. These are 
    typically the indices of the dataframe containing
    the products. 

    dest_dir: imagery destination directory. 

    remove_zip: whether to delete zipfile. 


    Outputs:
    -------
    zipfilepath, imagepath (.SAFE)
    '''

    api = SentinelAPI(SENTINEL_USER, SENTINEL_PASS,
                      'https://scihub.copernicus.eu/dhus/')

    downloadResult = api.download_all(product, directory_path=dest_dir)

    if type(product) == Index:
        #strings from now on
        product = product[0]

    zipfilePath = downloadResult[0][product]['path']

    file = zipfile.ZipFile(zipfilePath, 'r')
    file.extractall(dest_dir)

    dirname = zipfilePath.split('.')[0] + '.SAFE/'

    return (zipfilePath, dirname)
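A minimal call sketch for downloadProductSentinel; the UUID below is the one quoted in the docstring, and the destination directory is a placeholder. SENTINEL_USER and SENTINEL_PASS are assumed to be defined at module level.

zip_path, safe_dir = downloadProductSentinel(
    '93b051ba-504c-4746-8f74-c422187d33aa',  # product identifier (from the docstring example)
    '/data/sentinel')                        # placeholder destination directory
print(zip_path, safe_dir)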
Example #28
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt
from datetime import date
import os
import zipfile

OUTPUT_FOLDER = 'D:\\NewData'

api = SentinelAPI('alpha_03', 'R9846om157', 'https://scihub.copernicus.eu/dhus')

footprint = geojson_to_wkt(read_geojson('map.geojson'))
products = api.query(footprint,
                     platformname='Sentinel-2',
                     date=("20190501", '20191030'),
                     producttype='S2MSI1C',
                     cloudcoverpercentage=(0, 20))

print(len(products))

if not os.path.exists(OUTPUT_FOLDER):
    os.makedirs(OUTPUT_FOLDER)

api.download_all(products, OUTPUT_FOLDER)

'''
for path in os.listdir(OUTPUT_FOLDER):
    if os.path.splitext(path)[1] == '.zip':
        with zipfile.ZipFile(path, 'r') as zip_ref:
            zip_ref.extractall(OUTPUT_FOLDER)
        
        os.remove(path)'''
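The commented-out unzip loop above would fail as written, because os.listdir() returns bare filenames rather than full paths; a sketch with the folder path joined in:

for name in os.listdir(OUTPUT_FOLDER):
    full_path = os.path.join(OUTPUT_FOLDER, name)
    if os.path.splitext(name)[1] == '.zip':
        with zipfile.ZipFile(full_path, 'r') as zip_ref:
            zip_ref.extractall(OUTPUT_FOLDER)
        os.remove(full_path)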
Example #29
test['cloudcoverpercentage']
test['ingestiondate']
set(products_df['processinglevel'])

# GeoJSON FeatureCollection containing footprints and metadata of the scenes
geojson_products = api.to_geojson(products)

# GeoPandas GeoDataFrame with the metadata of the scenes and the footprints as geometries
geodata_products = api.to_geodataframe(products)

# plot product time vs cloudcover
# data = [go.Scatter(x=products_df_sorted.ingestiondate, y=products_df_sorted[['cloudcoverpercentage']])]
# py.plotly.iplot(data, filename = 'time-series-simple')

# download
api.download_all(test.index)

# unzip
# todo: needs loop to run through the data sets
directory_to_extract_to = "unzip"
zip = zipfile.ZipFile(str(test.title[0]) + '.zip')
zip.extractall(directory_to_extract_to)
zip.close()

# todo: only process certain bands
# os.system("mkdir /Users/philipp/Projects/PycharmProjects/sentinel/unzip/" + str(test.title[1]) + '.SAFE/NDVIBANDS')
# os.system('cp /Users/philipp/Projects/PycharmProjects/sentinel/unzip/'+str(test.title[1])+'.SAFE/GRANULE'+str(test.index[1])+'IMG_DATA'+ str(test.title[1])[-22:]/Users/philipp/Projects/PycharmProjects/sentinel/unzip/"+str(test.title[1])+'.SAFE/NDVIBANDS')


# atmospheric correction using Sen2Cor
cmd = '/Applications/Sens2Cor/bin/L2A_Process --resolution 10 ' + '/Users/philipp/Projects/PycharmProjects/RS/unzip/' + str(
Example #30
class Downloader(object):
    def __init__(self,
                 username,
                 password,
                 satellite,
                 order_id,
                 directory=Path('/data/')):
        # The connection to ESA scihub
        self.api = SentinelAPI(username, password,
                               'https://scihub.copernicus.eu/dhus')

        # Sentinel-5p currently has its own pre-operations hub
        self.api_s5p = SentinelAPI(user='******',
                                   password='******',
                                   api_url='https://s5phub.copernicus.eu/dhus')

        # Use the current datetime to name the download order
        self.order_id = order_id

        # Use ordered dict to store the metadata of the queries products
        self.products = OrderedDict()

        self.satellite = satellite

        self.directory = directory
        # if not self.directory.exists():  # Create directory if it does not exist
        #     os.makedirs(self.directory)

    def query(self, footprint, startdate, enddate):
        if self.satellite == 's1' or self.satellite == 'all':
            self.query_s1(footprint, startdate, enddate)
        if self.satellite == 's2' or self.satellite == 'all':
            self.query_s2(footprint, startdate, enddate)
        if self.satellite == 's3' or self.satellite == 'all':
            self.query_s3(footprint, startdate, enddate)
        if self.satellite == 's5p' or self.satellite == 'all':
            self.query_s5p(footprint, startdate, enddate)

    def query_s1(self, footprint, startdate, enddate):
        # Define producttypes (here it is Sentinel-1 GRDH products)
        producttypes = ['GRD']

        # Loop over producttypes and update the query dictionary
        # TODO: Fix this inefficient way of querying the relative orbits
        if FLAGS.s1_relative_orbit == [0]:
            for producttype in producttypes:
                queried_products = self.api.query(footprint,
                                                  date=(startdate, enddate),
                                                  platformname='Sentinel-1',
                                                  producttype=producttype,
                                                  sensoroperationalmode='IW')
                self.products.update(queried_products)

        else:
            for producttype in producttypes:
                for relative_orbit in FLAGS.s1_relative_orbit:
                    queried_products = self.api.query(
                        footprint,
                        date=(startdate, enddate),
                        platformname='Sentinel-1',
                        producttype=producttype,
                        sensoroperationalmode='IW',
                        relativeorbitnumber=relative_orbit)
                    self.products.update(queried_products)

    def query_s2(self, footprint, startdate, enddate):
        # Load parameters from FLAGS
        max_cloudcoverage = FLAGS.s2_max_cloudcoverage

        # Define producttypes (here it is Sentinel-2 L2A products)
        producttypes = [
            'S2MSI2Ap', 'S2MSI2A'
        ]  # Producttype names differ depending on the year they were published

        # Loop over producttypes and update the query dictionary
        # TODO: Fix this inefficient way of querying the relative orbits
        if FLAGS.s2_relative_orbit == [0]:
            for producttype in producttypes:
                queried_products = self.api.query(
                    footprint,
                    date=(startdate, enddate),
                    platformname='Sentinel-2',
                    producttype=producttype,
                    cloudcoverpercentage=(0, max_cloudcoverage))
                self.products.update(queried_products)

        else:
            for producttype in producttypes:
                for relative_orbit in FLAGS.s2_relative_orbit:
                    queried_products = self.api.query(
                        footprint,
                        date=(startdate, enddate),
                        platformname='Sentinel-2',
                        relativeorbitnumber=relative_orbit,
                        producttype=producttype,
                        cloudcoverpercentage=(0, max_cloudcoverage))
                    self.products.update(queried_products)

    def query_s3(self, footprint, startdate, enddate):
        queried_products = self.api.query(footprint,
                                          date=(startdate, enddate),
                                          platformname='Sentinel-3',
                                          producttype='SL_2_LST___',
                                          productlevel='L2')

        self.products.update(queried_products)

    def query_s5p(self, footprint, startdate, enddate):
        kwargs = {}
        producttypedescriptions = [
            'Ozone', 'Sulphur Dioxide', 'Nitrogen Dioxide', 'Methane',
            'Formaldehyde', 'Carbon Monoxide', 'Aerosol Index',
            'Aerosol Layer Height', 'Cloud'
        ]
        # producttypedescriptions = ['Ozone']

        # Loop over producttypes and update the query dictionary
        for producttypedescription in producttypedescriptions:
            queried_products = self.api_s5p.query(
                footprint,
                date=(startdate, enddate),
                platformname='Sentinel-5 Precursor',
                processinglevel='L2',
                producttypedescription=producttypedescription,
                **kwargs)
            # Remove any 'Suomi-NPP VIIRS Clouds' products which are returned as 'Cloud' (they shouldn't have been)
            # https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-5p/products-algorithms
            if producttypedescription == 'Cloud':
                temp_queried_products = queried_products.copy()
                for key in queried_products.keys():
                    if queried_products[key][
                            'producttypedescription'] != 'Cloud':
                        del temp_queried_products[key]
                queried_products = temp_queried_products
            self.products.update(queried_products)

    def print_num_and_size_of_products(self):
        logging.info('Number of products = ' + str(len(list(self.products))))
        logging.info('Total size [GB] = ' +
                     str(self.api.get_products_size(self.products)))

    # https://sentinelsat.readthedocs.io/en/master/api.html#lta-products
    # TODO: Get LTA retrieval to work properly (install of newest sentinelsat version is in dockerfile)
    # Retry every 30 min (+10 second buffertime) to request LTA products.
    @tenacity.retry(stop=tenacity.stop_after_attempt(200),
                    wait=tenacity.wait_fixed(1810))
    def download_zipfiles(self):
        zipfiles_directory = self.directory / 'zipfiles'
        if not zipfiles_directory.exists():
            # Create directory if it does not exist
            os.makedirs(zipfiles_directory)

        # Get the products to be downloaded. The sample() function permutes the dataframe, so that a new LTA product
        # is requested at every retry. The optimal solution would have been to rearrange the dataframe by rotating the
        # index at every retry, but this is a quick and dirty way to achieve something similar
        # (https://stackoverflow.com/a/34879805/12045808).
        products_df = self.queried_products_as_df().sample(frac=1)

        # NOTE: The code below is only useful while the Sentinel-5p has a different api than the others. After this has
        #       been fixed, the code should be reduced to the following single line:
        # Download all zipfiles (it automatically checks if zipfiles already exist)
        # self.api.download_all(self.products, directory_path=zipfiles_directory)  # Download all zipfiles
        # But for now, use the following code:
        non_s5p_products = products_df[
            products_df['platformname'] != 'Sentinel-5 Precursor']
        s5p_products = products_df[products_df['platformname'] ==
                                   'Sentinel-5 Precursor']

        if len(non_s5p_products):
            logging.info("Downloading Sentinel-1/2/3 products")
            self.api.download_all(non_s5p_products.to_dict(into=OrderedDict,
                                                           orient='index'),
                                  directory_path=zipfiles_directory)
        else:
            logging.info("No Sentinel-1/2/3 products found in query")

        if len(s5p_products):
            logging.info("Downloading Sentinel-5p products")
            self.api_s5p.download_all(s5p_products.to_dict(into=OrderedDict,
                                                           orient='index'),
                                      directory_path=zipfiles_directory)
        else:
            logging.info("No Sentinel-5p products found in query")

        # The Sentinel-5p data has wrongly been given the filetype .zip, but it should be .nc, so make a copy with
        # .nc extension. A copy is made instead of renaming so sentinelsat doesn't re-download the file every time
        # it is run.
        s5p_downloaded_files = zipfiles_directory.glob('S5P*.zip')
        logging.debug(
            "Copying downloaded Sentinel-5p files from .zip to .nc (due to bug in SentinelSat)"
        )
        for file in s5p_downloaded_files:
            if not file.with_suffix('.nc').exists():
                shutil.copy(str(file), str(file.with_suffix('.nc')))

    def queried_products_as_geojson(self):
        return self.api.to_geojson(self.products)

    def queried_products_as_df(self):
        return self.api.to_dataframe(self.products)

    def save_queried_products(self):
        orders_directory = self.directory / 'orders'
        if not orders_directory.exists():
            os.makedirs(orders_directory)

        # Save the queried products to a geojson file (e.g. to be loaded into QGIS)
        geojson_path = (self.directory / 'orders' /
                        self.order_id).with_suffix('.geojson')
        with geojson_path.open('w') as geojson_file:
            geojson_data = self.api.to_geojson(self.products)
            geojson_file.write(str(geojson_data))

        # Save the queried products as pandas df in a pkl file (preferred format when working in Python)
        df_path = (self.directory / 'orders' /
                   self.order_id).with_suffix('.pkl')
        df = self.api.to_dataframe(self.products)
        df.to_pickle(df_path)
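A short end-to-end sketch for the Downloader class above; the credentials, order id, footprint and dates are placeholders, and FLAGS is assumed to be configured elsewhere as in the snippet.

downloader = Downloader('my_user', 'my_password',       # placeholder credentials
                        satellite='s2',
                        order_id='2020-06-30_demo',      # placeholder order id
                        directory=Path('/data/'))
footprint = 'POLYGON((10.0 55.0, 10.5 55.0, 10.5 55.5, 10.0 55.5, 10.0 55.0))'  # placeholder AOI
downloader.query(footprint, '20200601', '20200630')
downloader.print_num_and_size_of_products()
downloader.save_queried_products()
downloader.download_zipfiles()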
Example #31
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt


# -------------------------------------------
# necessary information:
user = '******'
password = '******'

# YYYYMMDD
start_date = '20150101'
end_date = '20180207'

# map.geojson with boundary coordinates
# just generate and save as "map.geojson" using: --- http://geojson.io ---
geojson_path = 'directory\\to\\the\\file\\map.geojson'


# where to save the data
save_path = 'directory\\to\\the\\save_folder'


# -------------------------------------------

# connect to the API / SentinelHub
api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus', show_progressbars=True)
footprint = geojson_to_wkt(read_geojson(geojson_path))
products = api.query(footprint,date=(start_date, end_date), platformname='Sentinel-2', producttype='S2MSI2Ap')
print('Number of images: {}'.format(len(products)))
api.download_all(products, save_path)
Example #32

def download_best(_box: box, download_path: str, user: str,
                  pw: str) -> tp.List[str]:
    _api = SentinelAPI(user, pw, 'https://scihub.copernicus.eu/dhus')

    file_path = os.path.join(download_path, "save.csv")

    if not os.path.exists(file_path):

        products = _api.query(
            _box,
            date=('NOW-1MONTH', 'NOW'),
            platformname='Sentinel-2',
            processinglevel='Level-1C',
            cloudcoverpercentage=(0, 10),
        )

        products_df = _api.to_dataframe(products)

        tile_ids = []

        def _unknown_tile_id(x: str, t_ids: tp.List) -> bool:
            ret_val = x in t_ids
            if not ret_val:
                t_ids.append(x)

            return not ret_val

        # sort products
        products_df_sorted = products_df.sort_values(["cloudcoverpercentage"],
                                                     ascending=[True])

        # sort out tiles double tiles with higher cloud coverage
        first_tiles = [
            _unknown_tile_id(x, tile_ids)
            for x in list(products_df_sorted['tileid'].array)
        ]
        #  first_titles = np.vectorize(_unknown_tile_id(lambda x:x, tile_ids))(products_df_sorted['tileid'].array)
        products_df_sorted_unique = products_df_sorted[first_tiles]

        if not os.path.exists(download_path):
            os.makedirs(download_path)
        products_df_sorted_unique.to_csv(file_path)
    else:
        products_df_sorted_unique = pd.read_csv(file_path)  # the cache above is written with to_csv, so read it back as CSV

    products_df_sorted_unique['area'] = [
        __estimate_area(loads(e))
        for e in list(products_df_sorted_unique['footprint'].array)
    ]

    #  sort out areas smaller than three quarters of the full 100 km * 100 km tile size
    products_df_sorted_unique_larger = products_df_sorted_unique[
        products_df_sorted_unique['area'] > 100000 * 100000 / 4 * 3]

    _api.download_all(products_df_sorted_unique_larger.uuid, download_path)

    # estimate area from footprint

    return [
        os.path.join(download_path, x) for x in products_df_sorted_unique.title
    ]
Example #33
class GetImage():
    def __init__(self):
        self.api = None
        self._connect_to_api()

    def _connect_to_api(self):
        user = '******'
        password = "******"
        self.api = SentinelAPI(user, password,
                               'https://scihub.copernicus.eu/dhus')

    def _create_footprint(self, sf):
        footprint = None
        for i in sf['geometry']:
            footprint = i
        return footprint

    def _get_product(self,
                     footprint,
                     num_images,
                     start_date=0,
                     end_date=0,
                     processing_level='Level-2A'):
        products = self.api.query(footprint,
                                  date=('20190601', '20190626'),
                                  platformname='Sentinel-2',
                                  processinglevel='Level-2A',
                                  cloudcoverpercentage=(0, 10))
        products_gdf = self.api.to_geodataframe(products)
        best_products = products_gdf.sort_values(
            ['cloudcoverpercentage', 'ingestiondate'],
            ascending=[True, True]).head(num_images)
        print(best_products[['title', 'cloudcoverpercentage']])
        return best_products

    def _download_image(self, products):
        save_path = "products/"
        extension = ".zip"

        origin_path = os.getcwd()
        os.makedirs(save_path, exist_ok=True)
        os.chdir(save_path)
        current_path = os.getcwd()
        self.api.download_all(products.index)

        for item in os.listdir(current_path):
            if item.endswith(extension):
                file_name = os.path.abspath(item)
                zip_ref = zipfile.ZipFile(file_name)
                zip_ref.extractall(current_path)
                zip_ref.close()
                os.remove(file_name)
        print(current_path)
        directories = [x for x in glob.glob(current_path + '/*/')]
        os.chdir(origin_path)

        return directories

    def get_image(self, shp_path, num_images):
        sf = gpd.read_file(shp_path)
        footprint = self._create_footprint(sf)
        products = self._get_product(footprint, num_images)
        list_images = self._download_image(products)
        print(list_images)
        return 0
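A minimal sketch of driving the GetImage class; the shapefile path is a placeholder.

getter = GetImage()               # connects to the API in __init__
getter.get_image('aoi.shp', 2)    # placeholder shapefile and number of images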
Example #34
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt

api = SentinelAPI('wzy951218', 'wzy951218',
                  r'https://scihub.copernicus.eu/apihub/')

footprint = 'POLYGON((-144.06375022091746 49.578033057579205,-133.78368082953386 49.578033057579205,-119.7913641579284 49.29950403177739,-118.50635548400545 44.9305972448679,-119.22024919174042 38.769771447127226,-115.36522316997157 32.99050642026454,-111.36741840665574 27.442065274198697,-104.65681755394698 21.997798955341523,-103.37180888002403 16.19931850350966,-107.94072860952785 13.439030598741425,-111.51019714820274 12.46502033664244,-117.93524051781748 12.32556981839386,-121.07637283185136 12.46502033664244,-144.06375022091746 49.578033057579205,-144.06375022091746 49.578033057579205))'
products = api.query(footprint,
                     date=('20180601', '20180731'),
                     platformname='Sentinel-2',
                     producttype='S2MSI1C',
                     cloudcoverpercentage=(5, 25),
                     area_relation='IsWithin')  # cloud cover percentage
path = "F:\\America\\S2A\\"

api.download_all(products, directory_path=path)
Example #35
# all the requests have been queued in the OrderedDict 'products',
# start the bulk download of the tiles.

print('Proceeding to download:')

for uid in products:

    archive = products[uid]
    tileid = archive['tileid']
    acqdate = archive['datatakesensingstart'].strftime("%Y-%m-%d")

    _logger.info('- Tile %s acquired on %s' % (tileid, acqdate))

if sw_downloads:
    api.download_all(products, tilesdir)

# all the tiles matching the search parameters are saved in the
# products data structure. 'products' is a dictionary of dictionaries.
# See the function 'extractSentinel2Bands' for more details

# Extract all band images from archive

if platform == 'Sentinel-2':
    procQueue = extractSentinel2Bands(tilesdir, products)
else:
    _logger.info('Platform not supported. Unzip archive manually.')

#procQueue = []
#procQueue.append(r'D:\_RSDATA_\Sentinel-2\54LYJ\20181208')
Example #36
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt


region = {"type": "Polygon", "coordinates": [[[-69.87682044199994, 12.427394924000097], [-70.05809485599988, 12.537176825000088], [-70.04873613199993, 12.632147528000104], [-69.93639075399994, 12.53172435100005], [-69.87682044199994, 12.427394924000097]]]}

api = SentinelAPI('nilshempelmann', '****')
footprint = geojson_to_wkt(region)
products = api.query(footprint,
                     producttype='SLC',
                     orbitdirection='ASCENDING')
api.download_all(products)