Beispiel #1
0
def test_small_query():
    """The small fixture query produces the expected raw query and a 200 status."""
    api = SentinelAPI(**_api_kwargs)
    api.query(**_small_query)
    expected_query = (
        '(beginPosition:[2015-01-01T00:00:00Z TO 2015-01-02T00:00:00Z]) '
        'AND (footprint:"Intersects(POLYGON((0 0,1 1,0 1,0 0)))")'
    )
    assert api._last_query == expected_query
    assert api._last_status_code == 200
Beispiel #2
0
def test_scihub_unresponsive():
    """Every API entry point surfaces the underlying requests timeout unchanged."""
    timeout = (6, 6.6)  # (connect, read) timeouts in seconds
    api = SentinelAPI("mock_user", "mock_password", timeout=timeout)
    uuid = '8df46c9e-a20c-43db-a19a-4240c2ed3b8b'

    # Exercise the same four calls once per timeout flavour.
    for exc in (requests.exceptions.ConnectTimeout, requests.exceptions.ReadTimeout):
        with requests_mock.mock() as rqst:
            rqst.request(requests_mock.ANY, requests_mock.ANY, exc=exc)
            with pytest.raises(exc):
                api.query(**_small_query)

            with pytest.raises(exc):
                api.get_product_odata(uuid)

            with pytest.raises(exc):
                api.download(uuid)

            with pytest.raises(exc):
                api.download_all([uuid])
Beispiel #3
0
def test_SentinelAPI_wrong_credentials():
    """Bad credentials surface as a SentinelAPIError carrying HTTP 401."""
    bad_api = SentinelAPI("wrong_user", "wrong_password")
    with pytest.raises(SentinelAPIError) as excinfo:
        bad_api.query(**_small_query)
    assert excinfo.value.response.status_code == 401
Beispiel #4
0
def test_SentinelAPI_connection():
    """An authenticated query succeeds and records the raw query string."""
    api = SentinelAPI(**_api_auth)
    api.query(**_small_query)

    expected_query = (
        'beginPosition:[2015-01-01T00:00:00Z TO 2015-01-02T00:00:00Z] '
        'footprint:"Intersects(POLYGON((0 0,1 1,0 1,0 0)))"'
    )
    assert api._last_query == expected_query
    assert api._last_response.status_code == 200
Beispiel #5
0
def test_get_products_invalid_json():
    """A malformed JSON payload from SciHub raises 'Invalid API response.'."""
    api = SentinelAPI("mock_user", "mock_password")
    with requests_mock.mock() as rqst:
        # The server answers 200 but the body is not parseable JSON.
        rqst.post(
            'https://scihub.copernicus.eu/apihub/search?format=json',
            text="{Invalid JSON response",
            status_code=200,
        )
        query_kwargs = dict(
            area=geojson_to_wkt(read_geojson(FIXTURES_DIR + "/map.geojson")),
            date=("20151219", "20151228"),
            platformname="Sentinel-2",
        )
        with pytest.raises(SentinelAPIError) as excinfo:
            api.query(**query_kwargs)
        assert excinfo.value.msg == "Invalid API response."
Beispiel #6
0
def test_date_arithmetic():
    """Date strings with DAY/HOUR offset arithmetic are accepted by the server."""
    api = SentinelAPI(**_api_kwargs)
    date_range = ('2016-12-01T00:00:00Z-1DAY', '2016-12-01T00:00:00Z+1DAY-1HOUR')
    products = api.query('ENVELOPE(0, 10, 10, 0)', date_range)
    assert api._last_response.status_code == 200
    assert len(products) > 0
Beispiel #7
0
def test_large_query():
    """Pagination: limit/offset slices must agree with the full result list."""
    api = SentinelAPI(**_api_kwargs)
    full_products = list(api.query(**_large_query))
    expected_query = (
        'beginPosition:[2015-12-01T00:00:00Z TO 2015-12-31T00:00:00Z] '
        'footprint:"Intersects(POLYGON((0 0,0 10,10 10,10 0,0 0)))"'
    )
    assert api._last_query == expected_query
    assert api._last_response.status_code == 200
    assert len(full_products) > api.page_size

    # limit only
    assert list(api.query(limit=150, **_large_query)) == full_products[:150]
    # limit + offset window in the middle
    assert list(api.query(limit=20, offset=90, **_large_query)) == full_products[90:110]
    # window that overlaps the end of the result set
    tail = list(api.query(limit=20, offset=len(full_products) - 10, **_large_query))
    assert tail == full_products[-10:]
Beispiel #8
0
def products():
    """A fixture for tests that need some non-specific set of products as input."""
    api = SentinelAPI(**_api_auth)
    aoi = geojson_to_wkt(read_geojson('tests/map.geojson'))
    return api.query(aoi, "20151219", "20151228")
Beispiel #9
0
def test_large_query():
    """A query spanning more than one page still reports every product."""
    api = SentinelAPI(**_api_kwargs)
    products = api.query(**_large_query)
    expected_query = (
        '(beginPosition:[2015-12-01T00:00:00Z TO 2015-12-31T00:00:00Z]) '
        'AND (footprint:"Intersects(POLYGON((0 0,0 10,10 10,10 0,0 0)))")'
    )
    assert api._last_query == expected_query
    assert api._last_status_code == 200
    assert len(products) > api.page_size
Beispiel #10
0
def products(api_kwargs, vcr, test_wkt):
    """A fixture for tests that need some non-specific set of products as input."""
    with vcr.use_cassette('products_fixture', decode_compressed_response=False):
        result = SentinelAPI(**api_kwargs).query(test_wkt, ("20151219", "20151228"))
    assert len(result) > 20
    return result
Beispiel #11
0
def test_SentinelAPI_wrong_credentials():
    """All four endpoints reject bad credentials with SentinelAPIError / HTTP 401."""
    api = SentinelAPI("wrong_user", "wrong_password")
    uuid = '8df46c9e-a20c-43db-a19a-4240c2ed3b8b'

    calls = (
        lambda: api.query(**_small_query),
        lambda: api.get_product_odata(uuid),
        lambda: api.download(uuid),
        lambda: api.download_all([uuid]),
    )
    for call in calls:
        with pytest.raises(SentinelAPIError) as excinfo:
            call()
        assert excinfo.value.response.status_code == 401
Beispiel #12
0
def test_area_relation():
    """'Contains' narrows and 'IsWithin' empties the default 'Intersects' results."""
    api = SentinelAPI(**_api_auth)
    params = dict(
        area="POLYGON((10.83 53.04,11.64 53.04,11.64 52.65,10.83 52.65,10.83 53.04))",
        date=("20151219", "20151226"),
    )
    n_intersects = len(api.query(**params))
    assert n_intersects > 10

    n_contains = len(api.query(area_relation="contains", **params))
    assert 0 < n_contains < n_intersects
    n_iswithin = len(api.query(area_relation="IsWithin", **params))
    assert n_iswithin == 0

    # Check that unsupported relations raise an error
    with pytest.raises(ValueError):
        api.query(area_relation="disjoint", **params)
Beispiel #13
0
def test_get_products_size(products):
    """get_products_size sums sizes in GB; tiny totals round down to zero."""
    assert SentinelAPI.get_products_size(products) == 90.94

    # load a new very small query
    api = SentinelAPI(**_api_auth)
    with my_vcr.use_cassette('test_get_products_size'):
        small_products = api.query(
            raw="S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E")
    assert len(small_products) > 0
    # Rounded to zero
    assert SentinelAPI.get_products_size(small_products) == 0
Beispiel #14
0
def test_order_by():
    """order_by sorts results primarily by ascending cloud cover."""
    api = SentinelAPI(**_api_auth)
    aoi = geojson_to_wkt(read_geojson(FIXTURES_DIR + '/map.geojson'))
    products = api.query(
        aoi,
        ("20151219", "20151228"),
        platformname="Sentinel-2",
        cloudcoverpercentage=(0, 10),
        order_by="cloudcoverpercentage, -beginposition",
    )
    assert len(products) == 3
    cloud_covers = [product["cloudcoverpercentage"] for product in products.values()]
    assert sorted(cloud_covers) == cloud_covers
Beispiel #15
0
def test_s2_cloudcover():
    """A raw cloud-cover range filter returns the three known products, in order."""
    api = SentinelAPI(**_api_auth)
    products = api.query(
        geojson_to_wkt(read_geojson('tests/map.geojson')),
        "20151219", "20151228",
        platformname="Sentinel-2",
        cloudcoverpercentage="[0 TO 10]",
    )
    assert len(products) == 3

    expected_ids = [
        "6ed0b7de-3435-43df-98bf-ad63c8d077ef",
        "37ecee60-23d8-4ec2-a65f-2de24f51d30e",
        "0848f6b8-5730-4759-850e-fc9945d42296",
    ]
    assert list(products) == expected_ids
Beispiel #16
0
def test_footprints_s1():
    """S1 GRD footprints are valid GeoJSON and match the stored fixture."""
    api = SentinelAPI(**_api_auth)
    products = api.query(
        geojson_to_wkt(read_geojson(FIXTURES_DIR + '/map.geojson')),
        (datetime(2014, 10, 10), datetime(2014, 12, 31)),
        producttype="GRD",
    )

    footprints = api.to_geojson(products)
    assert all(not feature['geometry'].errors() for feature in footprints['features'])

    with open(FIXTURES_DIR + '/expected_search_footprints_s1.geojson') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(footprints) == set(expected_footprints)
Beispiel #17
0
def test_too_long_query():
    """The query-length estimate tracks the server's URI-length limit closely."""
    api = SentinelAPI(**_api_kwargs)

    # Test whether our limit calculation is reasonably correct and
    # that a relevant error message is provided
    def create_query(n):
        return api.format_query(date=("NOW", "NOW"), raw=" abc_:*.+*~!," * n)

    # Just under the limit: the server still answers (with a query-syntax error).
    query = create_query(170)
    assert 0.99 < SentinelAPI.check_query_length(query) < 1.0
    with pytest.raises(SentinelAPIError) as excinfo:
        api.query(raw=query)
    assert "Invalid query string" in excinfo.value.msg

    # Just over the limit: expect HTTP 500 with a too-long/too-large message.
    query = create_query(171)
    assert 1.0 <= SentinelAPI.check_query_length(query) < 1.01
    with pytest.raises(SentinelAPIError) as excinfo:
        api.query(raw=query)
    assert excinfo.value.response.status_code == 500
    assert ("Request Entity Too Large" in excinfo.value.msg
            or "Request-URI Too Long" in excinfo.value.msg)
Beispiel #18
0
def test_footprints_s1():
    """S1 GRD footprints validate as GeoJSON and match the stored fixture file."""
    api = SentinelAPI(**_api_auth)
    products = api.query(
        geojson_to_wkt(read_geojson('tests/map.geojson')),
        datetime(2014, 10, 10), datetime(2014, 12, 31),
        producttype="GRD",
    )

    footprints = api.to_geojson(products)
    for feature in footprints['features']:
        validation = geojson.is_valid(feature['geometry'])
        assert validation['valid'] == 'yes', validation['message']

    with open('tests/expected_search_footprints_s1.geojson') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(footprints) == set(expected_footprints)
Beispiel #19
0
def test_s2_cloudcover():
    """Cloud-cover tuple filter yields the three known products in natural order."""
    api = SentinelAPI(**_api_auth)
    products = api.query(
        geojson_to_wkt(read_geojson(FIXTURES_DIR + '/map.geojson')),
        ("20151219", "20151228"),
        platformname="Sentinel-2",
        cloudcoverpercentage=(0, 10),
    )
    assert len(products) == 3

    expected_ids = [
        "6ed0b7de-3435-43df-98bf-ad63c8d077ef",
        "37ecee60-23d8-4ec2-a65f-2de24f51d30e",
        "0848f6b8-5730-4759-850e-fc9945d42296",
    ]
    assert list(products) == expected_ids

    # For order-by test
    cloud_covers = [product["cloudcoverpercentage"] for product in products.values()]
    assert sorted(cloud_covers) != cloud_covers
#!/usr/bin/env python
"""Download all Sentinel-2 L1C products over an AOI since 2016 with <= 15 % cloud cover."""
import time
import datetime
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt

# Search window: fixed start date up to today.
start_date = '20160101'  # hardcoded value
current_date = datetime.date.today().strftime("%Y%m%d")

# Bug fix: the original used Python 2 print statements, a syntax error on Python 3.
print("loading!")
# NOTE(review): credentials are hard-coded; move them to env vars or a config file.
api = SentinelAPI('mpc', 'q11g33h99', 'https://scihub.copernicus.eu/dhus')
footprint = geojson_to_wkt(read_geojson('map.geojson'))

products = api.query(footprint,
                     date=(start_date, current_date),
                     platformname='Sentinel-2',
                     producttype='S2MSI1C',
                     cloudcoverpercentage=(0, 15))

api.download_all(products)

print("DONE!!!")
Beispiel #21
0
def products():
    """A fixture for tests that need some non-specific set of products as input."""
    aoi = geojson_to_wkt(read_geojson('tests/map.geojson'))
    return SentinelAPI(**_api_auth).query(aoi, "20151219", "20151228")
        # NOTE(review): fragment of a larger function -- 'path', 'api', 'dates',
        # 'seasons' and 'cloud_cover_percentage' come from the enclosing scope,
        # which is outside this excerpt.
        # loop through features within the same file
        for j, geometry in enumerate(read_geojson(path)["features"]):

            # converts to well-known text
            footprint = geojson_to_wkt(geometry)

            # determine if searching by dates or by seasons
            if dates is not None and seasons is None:

                # find products within dates and suitable cloud cover
                # (start/end assembled as YYYYMMDD strings from the dates dict)
                products = api.query(
                    footprint,
                    date=('{}{}{}'.format(dates['year'][0], dates['month'][0],
                                          dates['day'][0]),
                          '{}{}{}'.format(dates['year'][1], dates['month'][1],
                                          dates['day'][1])),
                    platformname='Sentinel-2',
                    processinglevel='Level-2A',
                    cloudcoverpercentage=(0, cloud_cover_percentage))

                # print error message if there are no images which fit the requirements
                # NOTE(review): 'break' exits the feature loop entirely rather than
                # skipping to the next feature -- confirm this is intended.
                if len(list(products)) == 0:
                    print(
                        "There are no Sentinel-2 images for this AOI taken within the specified dates"
                    )
                    break

                # convert from dictionary to Pandas dataframe
                products_df = api.to_dataframe(products)
Beispiel #23
0
#====================================================================
#	Retrive latest sentinel 2
#====================================================================
#https://www.evernote.com/Home.action#n=e77ce355-1b1e-4a89-896b-4036f905dfea&ses=1&sh=5&sds=5&x=sentinel&

# NOTE(review): this snippet is scratch notes, not runnable Python -- the line
# below is a shell command, the 'gj = ...' line has a stray leading space, and
# the dict literal further down is a floating expression with undefined names.
#This CLI works:
sentinelsat -u jfiddes -p sT0kkang -g extent.json -s 20151201 -e 20151207 --sentinel 2 --cloud 90 -d

#This API doesnt work yet (input posiitions from getExtent.py:

from geojson import Polygon
 gj = Polygon([[(float(lonW), float(latN)), (float(lonW), float(latS)) , (float(lonE),float(latS)), (float(lonE), float(latN)), (float(lonW), float(latN))]])

{"coordinates": [[[lonW, latN], [lonW, latN],  [lonE,latS],  [lonE, latN] ]], "type": "Polygon"}

import geojson
gj.is_valid



from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt
from datetime import date
# NOTE(review): hard-coded credentials appear here as well.
api = SentinelAPI('jfiddes', 'sT0kkang', 'https://scihub.copernicus.eu/dhus')
# NOTE(review): footprint is a geojson Polygon object, not WKT -- api.query()
# normally expects a WKT string; confirm before reusing.
footprint = gj
products = api.query(footprint, beginposition = '[20160101 TO 20160103]', platformname = 'Sentinel-2', cloudcoverpercentage = '[0 TO 30]')
Beispiel #24
0
def sent1_query(user,
                passwd,
                geojsonfile,
                start_date,
                end_date,
                output_folder=None,
                api=True):
    """
    A convenience function that wraps a sentinelsat Sentinel-1 GRD query
    and (optionally) downloads the results.

    Notes
    -----
    sentinelsat sometimes fails to download the second image, so an
    alternative command-line download path is provided - choose
    api=False for this.

    Parameters
    ----------
    user : string
        username for ESA hub
    passwd : string
        password for hub
    geojsonfile : string
        AOI polygon of interest
    start_date : string
        date of beginning of search
    end_date : string
        date of end of search
    output_folder : string, optional
        where you intend to download the imagery
    api : bool, optional
        if True download through the sentinelsat API, otherwise shell
        out to the 'sentinel' command-line tool per product

    Returns
    -------
    tuple
        (products_df, products) - the query results as a DataFrame and
        as the raw sentinelsat dict.
    """
    # TODO: Check if SentinelAPI will use TokenAuth instead of hard-coded cred strings
    # Bug fix: the client instance used to be bound to the name 'api',
    # shadowing the boolean 'api' parameter, so the API-download branch
    # below could never be taken.  Use a distinct name for the client.
    hub = SentinelAPI(user, passwd)

    # NOTE(review): 'oldsat' is not defined in this function; it is assumed
    # to be a module-level flag -- confirm.
    if oldsat is True:
        footprint = get_coordinates(geojsonfile)
    else:
        footprint = geojson_to_wkt(read_geojson(geojsonfile))
    products = hub.query(footprint, (start_date, end_date),
                         platformname="Sentinel-1",
                         producttype="GRD",
                         polarisationmode="VV, VH")
    products_df = hub.to_dataframe(products)

    if api is True and output_folder is not None:
        # Bug fix: download_all() was called without the product list.
        hub.download_all(products, directory_path=output_folder)
    else:
        # TODO: investigate flaky sentinelAPI
        # the api was proving flaky whereas the cmd line always works,
        # hence this alternate download option
        if output_folder is not None:
            for prod in np.arange(len(products)):
                # NOTE(review): indexing the products dict by integer position
                # looks wrong (it is keyed by uuid); kept as in the original --
                # confirm the expected structure of 'products'.
                sceneID = products[prod]['id']
                cmd = [
                    'sentinel', 'download', '-p', output_folder, user, passwd,
                    sceneID
                ]
                print(sceneID + ' downloading')
                subprocess.call(cmd)
    return products_df, products
Beispiel #25
0
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt
from datetime import date

# Connect to the Sentinel-5P pre-operational hub (credentials masked in this copy).
api = SentinelAPI(user='******',
                  password='******',
                  api_url='https://s5phub.copernicus.eu/dhus')
# AOI polygon for Colombia, converted from GeoJSON to WKT.
# NOTE(review): '~' is probably not expanded here (no os.path.expanduser call) --
# confirm read_geojson and download_all accept tilde paths on this setup.
footprint = geojson_to_wkt(
    read_geojson(
        '~/Dropbox/daniel/sentinel_5_colombia/maps/colombia_area.geojson'))
# All Sentinel-5P products from 2019-12-31 up to the present.
products = api.query(area=footprint,
                     date="[20191231 TO NOW]",
                     platformname='Sentinel-5p')

# downloading products
api.download_all(
    products,
    directory_path=
    "~/Dropbox/daniel/sentinel_5_colombia/products/202001_20200313")
Beispiel #26
0
class download_sentinel(Dataset_general):
    """Query the Copernicus Open Access Hub and download Sentinel-1/-2 products."""

    def __init__(self, username, password):
        # SciHub credentials; the API client itself is created lazily in
        # fetch_datasets().
        super().__init__()
        self.username = username
        self.password = password

    def sentinel_2(self, download_type='ROI_polygon', roi_polygon=None, startdate=None, enddate=None, cloudcover_max=5):
        """Query Sentinel-2 products; see fetch_datasets() for parameter meanings."""
        # Bug fix: cloudcover_max was accepted but never forwarded to
        # fetch_datasets(), so the caller's value was silently ignored.
        params = {'download_type': download_type, 'roi_polygon': roi_polygon,
                  'startdate': startdate, 'enddate': enddate,
                  'cloudcover_max': cloudcover_max,
                  'platformname': 'Sentinel-2'}
        return self.fetch_datasets(**params)

    def sentinel_1(self, download_type='ROI_polygon', roi_polygon=None, startdate=None, enddate=None):
        """Query Sentinel-1 products; see fetch_datasets() for parameter meanings."""
        params = {'download_type': download_type, 'roi_polygon': roi_polygon,
                  'startdate': startdate, 'enddate': enddate,
                  'platformname': 'Sentinel-1'}
        return self.fetch_datasets(**params)

    def fetch_datasets(self, download_type='ROI_polygon', roi_polygon=None, startdate=None, enddate=None, cloudcover_max=5,
                       platformname='Sentinel-2'):
        """
        Query SciHub and store the matching products on the instance.

        :param download_type: 'ROI_polygon' to query by an AOI polygon;
            'full' to download a single product by id.
        :param roi_polygon: path to a .geojson file describing the AOI
        :param startdate: query window start
        :param enddate: query window end
        :param cloudcover_max: maximum cloud cover percentage (Sentinel-2 only)
        :param platformname: 'Sentinel-2' or 'Sentinel-1'
        :return: None; results land in self.products / self.list_products
        """
        if startdate:
            self.startdate = startdate
        if enddate:
            self.enddate = enddate

        if roi_polygon:
            self.roi_polygon = roi_polygon
        self.api = SentinelAPI(self.username, self.password, 'https://scihub.copernicus.eu/dhus')

        # Robustness fix: guarantee list_products exists even when no branch
        # below assigns it (the original crashed on the final print).
        if not hasattr(self, 'list_products'):
            self.list_products = []

        product_id = None
        if download_type == 'full':
            # NOTE(review): product_id is always None at this point, so this
            # branch can only print the message below -- confirm intended usage.
            if product_id:
                self.api.download(product_id)
            else:
                print('product id required')

        if download_type == 'ROI_polygon':
            if roi_polygon.split('.')[-1] == 'geojson':
                # Read the AOI file, stripping a UTF-8 BOM if present
                # (fix: the file handle is now closed even on error).
                with open(self.roi_polygon, "r") as file_obj:
                    json_data = file_obj.read()
                json_data = json_data.encode().decode('utf-8-sig')  # Remove utf-8 BOM if any present in the file
                json_data = json.loads(json_data)
                footprint = geojson_to_wkt(json_data)

                if platformname == 'Sentinel-2':
                    self.products = self.api.query(footprint,
                                                   date=(self.startdate, self.enddate),
                                                   platformname=platformname,
                                                   cloudcoverpercentage=(0, cloudcover_max))
                    self.list_products = list(self.products.items())

                elif platformname == 'Sentinel-1':
                    self.products = self.api.query(footprint,
                                                   date=(self.startdate, self.enddate),
                                                   platformname=platformname)
                    self.list_products = list(self.products.items())

        print(len(self.list_products), ' products found')

    def download_files(self, list_product_ids, directory_path='.', unzip=True):
        """Download every id in list_product_ids into directory_path and
        optionally extract the resulting S*.zip archives in place."""
        for product_id in list_product_ids:
            self.api.download(product_id, directory_path=directory_path)

        if unzip:
            print('extracting files')
            file_names = glob(os.path.join(directory_path) + '/S*.zip')
            for filename in file_names:
                with zipfile.ZipFile(filename, 'r') as zip_ref:
                    zip_ref.extractall(directory_path)

    # TODO: add function to display product AOI Polygon
    def run(self):
        """Query a fixed AOI (Ufrj.geojson) and dump product ids and titles
        to two CSV files; returns the raw query result."""
        print("Running  " + str(self.data_end) + str(self.data_start))
        # NOTE(review): hard-coded credentials; move to configuration.
        api = SentinelAPI('biancasantana',
                          '988245535',
                          'https://scihub.copernicus.eu/dhus',
                          show_progressbars=True)
        footprint = geojson_to_wkt(read_geojson('Ufrj.geojson'))
        p = 'Sentinel-2'
        # Bug fix: 'data_end' was referenced without 'self.' and raised NameError.
        products = api.query(footprint,
                             date=(str(self.data_start), str(self.data_end)),
                             platformname=p,
                             cloudcoverpercentage='[0 TO 100]')
        print(type(products))

        products_df = api.to_dataframe(products)

        # Fragile string surgery on the index repr to extract bare ids;
        # behaviour kept as-is apart from the fixes noted below.
        ids = products_df.index
        ru = str(ids)
        io = (ru.replace("Index([", ""))
        reti_col = (io.replace("],", ""))
        ids_pasta = reti_col.replace("dtype='object')", "")

        b = "''"
        for i in range(0, len(b)):
            pro = ids_pasta.replace(b[i], "")

        b = ","
        for i in range(0, len(b)):
            pio = pro.replace(b[i], "")
        prin = pio.split()

        with open('sentinel_ids_download.csv',
                  mode='w',
                  encoding='utf-8',
                  newline='') as csv_file:

            fieldnames = ["id"]
            writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
            writer.writeheader()
            for idi in prin:
                writer.writerow({"id": idi})

        # Same surgery again, but on the product titles.
        products_df = api.to_dataframe(products)
        title = (products_df.set_index('title'))
        ids_txt = (title.index)
        ru = str(ids_txt)
        # Bug fix: the local variable 'int' shadowed the builtin; renamed.
        stripped = (ru.replace("Index([", ""))
        ret_para = (stripped.replace("dtype='object', name='title'", ""))
        reti_col = (ret_para.replace("],", ""))
        ids_pasta = reti_col.replace(")", "")
        b = "''"
        for i in range(0, len(b)):
            pro = ids_pasta.replace(b[i], "")
        print(pro)

        b = ","
        for i in range(0, len(b)):
            pio = pro.replace(b[i], "")
        prin = pio.split()
        with open('sentinel_certo.csv', mode='w', encoding='utf-8',
                  newline='') as csv_file:
            fieldnames = ["id"]
            writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
            writer.writeheader()
            for idi in prin:
                writer.writerow({"id": idi})

        return products
# NOTE(review): fragment -- 'parser' and the imports (gpd = geopandas,
# SentinelAPI) are defined outside this excerpt.
args = parser.parse_args()



api = SentinelAPI(args.user, args.password, 'https://scihub.copernicus.eu/dhus')
shapeFile = gpd.read_file(args.pathShapeFile)
footprint = None
# Keeps only the LAST geometry in the shapefile as the query footprint.
for i in shapeFile['geometry']:
    footprint = i
"""
En el caso de no funcionar con shp file utilizar un gejson de la misma region
"""
# (Translation of the note above: "If the shp file does not work, use a
# geojson of the same region instead.")
#footprint = geojson_to_wkt(read_geojson(pathGeojson))
# Sentinel-2 L2A products, July 2017 - September 2019, <= 10 % cloud cover.
products = api.query(footprint,
                     date = ('20170702','20190920'),
                     platformname = 'Sentinel-2',
                     processinglevel = 'Level-2A',
                     cloudcoverpercentage = (0,10)
                    )
products_gdf = api.to_geodataframe(products)
# Sort so the clearest (least cloudy) products come first.
products_gdf_sorted = products_gdf.sort_values(['cloudcoverpercentage'], ascending=[True])



shapeFile_wgt = shapeFile.to_crs(epsg=4326)

number_download = 0
max_downloads = 4
dict_percentage = {}

# NOTE(review): the loop body continues beyond this excerpt.
for i in range(len(products_gdf_sorted)):
    actual_polygon = products_gdf_sorted[i:i+1]
Beispiel #29
0
        # NOTE(review): fragment -- the AREA list literal begins before this
        # excerpt; these are its final coordinate pairs.
        [-122.74628, 49.005211], [-122.74628, 49.362603],
        [-123.293536, 49.362603]]

m = Polygon([AREA])

object_name = "Vancouver"
# Write the AOI polygon out as GeoJSON, then read it back in WKT form.
with open(object_name + ".geojson", 'w') as f:
    json.dump(m, f)
footprint_geojson = geojson_to_wkt(read_geojson(object_name + ".geojson"))

user = input("USER NAME: ")
# NOTE(review): the next line was mangled by credential masking ('******') and
# is not valid Python; it presumably read the password via getpass() and then
# constructed SentinelAPI(user, password, "https://scihub.copernicus.eu/dhus").
password = getpass("PASSWORD: "******"https://scihub.copernicus.eu/dhus")

# Folium preview map centred on the AOI, saved as an HTML file.
m = folium.Map([(AREA[0][1] + AREA[len(AREA) - 1][1]) / 2,
                (AREA[0][0] + AREA[len(AREA) - 1][0]) / 2],
               zoom_start=10)
folium.GeoJson(object_name + '.geojson').add_to(m)
m.save(outfile="datamap.html")
# os.system('open datamap.html')

# Sentinel-2 L2A products for December 2020 over the AOI, any cloud cover.
products = api.query(footprint_geojson,
                     date=('20201201', '20201221'),
                     platformname='Sentinel-2',
                     processinglevel='Level-2A',
                     cloudcoverpercentage=(0, 100))

products_gdf = api.to_geodataframe(products)
products_gdf_sorted = products_gdf.sort_values(['cloudcoverpercentage'],
                                               ascending=[True])
products_gdf_sorted
# connect to the API
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt
from datetime import date

# Prompt for SciHub credentials via Qt dialogs (QGIS Python console context).
# NOTE(review): QInputDialog().getText() returns a (text, ok) tuple in PyQt;
# passing the tuple straight to SentinelAPI looks wrong -- confirm.
user = QInputDialog().getText(None, "User", "User please:")
password = QInputDialog().getText(None, "Password", "Password please:")


api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus')

# Query location expressed as a WKT point (central Madrid).
point = QgsPointXY(-3.70256, 40.4165)
geom = QgsGeometry.fromPointXY(point).asWkt()


# Sentinel-2 L1C products over the point, 2020-01-20 .. 2020-02-01, <= 25 % clouds.
products = api.query(geom, platformname='Sentinel-2', cloudcoverpercentage=(0, 25), producttype='S2MSI1C', beginPosition='[2020-01-20T00:00:00.000Z TO 2020-02-01T23:59:59.999Z]')
json = api.to_geojson(products)

# Re-query each hit by uuid and download it.
for i in json["features"]:
    summary = i['properties']['summary']
    print(summary)
    uuid = i['properties']['uuid']
    product = api.query(uuid=uuid)
    # Bug fix: 'C:\cursos' relied on the invalid escape sequence '\c'
    # (a SyntaxWarning on modern Python, an error in the future); the raw
    # string yields the identical path value safely.
    api.download_all(product, directory_path=r'C:\cursos')
Beispiel #31
0
def test_SentinelAPI_wrong_credentials():
    """A query with bad credentials raises SentinelAPIError with HTTP 401."""
    bad_api = SentinelAPI("wrong_user", "wrong_password")
    with pytest.raises(SentinelAPIError) as excinfo:
        bad_api.query(**_small_query)
    assert excinfo.value.response.status_code == 401
Beispiel #32
0
class DownloadImages(object):
    '''
        This class is composed of a set of methods that download the Sentinel-2
        images of a given Polygon and update a directory with the downloaded files.

        Parameters
        ---------

        username: str
            personal username in Copernicus / Sentinel API

        password: str
            personal password in Copernicus / Sentinel API
    '''
    def __init__(self, username, password):
        self.username = username
        self.password = password
        self.api = SentinelAPI(self.username, self.password,
                               'https://scihub.copernicus.eu/dhus')

    def get_products(self,
                     polygon,
                     initial_date,
                     final_date,
                     cloudcover=(0, 10)):
        '''
        Overview: returns geodataframe of query products from SentinelAPI.

        Inputs
            polygon: Shape
                polygon of interest for api query request.
            initial_date: str
                string in the format 'YMD' of the initial request date.
            final_date: str
                string in the format 'YMD' of the final request date.
            cloudcover: tuple, default=(0,10)
                acceptable cloud coverage range for api request.

        Output:
            geodataframe of query products.
        '''
        products = self.api.query(polygon,
                                  date=(initial_date, final_date),
                                  platformname='Sentinel-2',
                                  processinglevel='Level-2A',
                                  cloudcoverpercentage=cloudcover)
        return self.api.to_geodataframe(products)

    def download_folders(self,
                         polygon,
                         regional_poly,
                         products,
                         database_path,
                         threshold=0.9):
        '''
        Overview: downloads folders of interest that are not in the current database.

        Inputs
            polygon: Shape
                polygon of interest for area intersection calculation.
            regional_poly: bool
                determines how intersection is normalised: against the product
                geometry (True) or against the polygon itself (False).
            products: geodataframe
                dataframe with all products returned from the API query request.
            database_path: str
                path to the database, where folders are to be downloaded.
            threshold: float, default=0.9
                minimum area of intersection required with polygon to download folder.

        Output
            returns geodataframe with products that were downloaded.
        '''
        delete = []
        for idx, row in products.iterrows():
            intersect_area = polygon.intersection(row.geometry).area
            # Normalise the overlap either by product area or AOI area.
            if regional_poly:
                area = intersect_area / row.geometry.area
            else:
                area = intersect_area / polygon.area
            if area >= threshold and row.filename not in os.listdir(database_path):
                # New product covering enough of the AOI: fetch and unpack it.
                self.api.download(idx, directory_path=database_path)
                filepath = os.path.join(database_path, row.title + '.zip')
                with zipfile.ZipFile(filepath) as zip_ref:
                    zip_ref.extractall(path=database_path)
            else:
                # Either below the overlap threshold or already downloaded.
                delete.append(idx)
        return products.drop(delete)

    def update_downloaded(self, csv_path, downloaded_products):
        '''
        Overview: updates the directory CSV with newly downloaded folders.

        Inputs
            csv_path: str
                path to csv file, filepath.
            downloaded_products: geodataframe
                gdf of all new products that were downloaded.

        Output
            N/A.
        '''
        try:
            df = pd.read_csv(csv_path)
        except (FileNotFoundError, pd.errors.EmptyDataError):
            # First run (or empty file): start the directory from scratch.
            # (Fix: the original bare 'except:' hid every other failure too.)
            downloaded_products.to_csv(csv_path)
        else:
            # Bug fix: DataFrame.append returns a NEW frame (and was removed in
            # pandas 2.x); the original discarded the result and re-saved the
            # old contents, so new products were never recorded.
            df = pd.concat([df, downloaded_products])
            df.to_csv(csv_path)

    def full_pipe(self,
                  csv_path,
                  polygon,
                  intial_date,
                  final_date,
                  database_path,
                  threshold=0.9,
                  cloudcover=(0, 10),
                  regional_poly=False):
        '''
        Overview: runs the entire Download Images pipeline, downloading and
        updating the directory with new products of interest.

        Inputs
            csv_path: str
                path to csv file, filepath.
            polygon: Shape
                polygon of interest for api query request and area threshold
                calculation to determine which folders to download.
            intial_date: str
                string in the format 'YMD' of the initial request date.
            final_date: str
                string in the format 'YMD' of the final request date.
            database_path: str
                path to the database, where folders are to be downloaded.
            threshold: float, default=0.9
                minimum area of intersection required with polygon to download folder.
            cloudcover: tuple, default=(0,10)
                acceptable cloud coverage range for api request.
            regional_poly: bool, default=False
                forwarded to download_folders (new keyword, backward compatible).
        Output
            N/A.
        '''
        products = self.get_products(polygon, intial_date, final_date,
                                     cloudcover)
        # Bug fix: download_folders takes regional_poly as its second argument;
        # the original call omitted it, shifting every later argument and
        # breaking the pipeline.
        products = self.download_folders(polygon, regional_poly, products,
                                         database_path, threshold)
        self.update_downloaded(csv_path, products)
Beispiel #33
0
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt


# -------------------------------------------
# necessary information:
user = '******'
password = '******'

# YYYYMMDD
start_date = '20150101'
end_date = '20180207'

# map.geojson with boundary coordinates
# just generate and save as "map.geojson" using: --- http://geojson.io ---
geojson_path = 'directory\\to\\the\\file\\map.geojson'


# where to save the data
save_path = 'directory\\to\\the\\save_folder'


# -------------------------------------------

# connect to the API / SentinelHub
api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus', show_progressbars=True)
footprint = geojson_to_wkt(read_geojson(geojson_path))
products = api.query(footprint, date=(start_date, end_date), platformname='Sentinel-2', producttype='S2MSI2Ap')
# BUG FIX: Python-2 print statement replaced with the print() function used
# everywhere else in this file.
print('Number of images: {}'.format(len(products)))
api.download_all(products, save_path)

Beispiel #34
0
# Open a SciHub session
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt
from datetime import date

hub = SentinelAPI('kongstad', 'Delphini1', 'https://scihub.copernicus.eu/dhus')

# Query by AOI polygon, sensing period and SciHub keyword filters
aoi = geojson_to_wkt(read_geojson('geojson/scoresbysund_small.geojson'))
results = hub.query(
    aoi,
    date=('20170826', date(2017, 8, 30)),
    platformname='Sentinel-2',
    cloudcoverpercentage=(0, 5))

hub.to_geojson(results)
# download all results from the search
#hub.download_all(results,'sentinel2images/')
Beispiel #35
0

# Set up API client against the Sentinel-5P pre-operations hub
api = SentinelAPI(user='******', password='******', api_url='https://s5phub.copernicus.eu/dhus')

# Define area of interest in WKT format
# NOTE(review): AOI is empty here — it must be filled in before the query.
AOI = ''

# Date window, yyyyMMdd
startdate = '20200101'
enddate = '20200430'

# Download list of Sentinel S5-P NO2 products in region of interest
products = api.query(AOI,
                      date=(startdate,enddate),
                     platformname='Sentinel-5',
                     producttype='L2__NO2___',
                     processingmode='Offline', # 'Near real time' or 'Offline'
                     )

# Convert to pandas dataframe for ease of use
products_df = api.to_dataframe(products)

# Convert AOI to shapely file
# NOTE(review): `wkt` is presumably shapely.wkt — it is not imported in this
# excerpt; confirm the import at the top of the original file.
AOIshape = wkt.loads(AOI)

# Create empty list of overlaping geometries
differences = []

# Check which images don't have complete overlap with AOI
# NOTE(review): the loop body is missing — this snippet is truncated in the
# source.
for image in range(len(products_df)):
    
Beispiel #36
0
def sent2_query(user,
                passwd,
                geojsonfile,
                start_date,
                end_date,
                cloud='100',
                output_folder=None,
                api=True):
    """
    A convenience function that wraps sentinelsat query & download.

    Notes
    -----------
    sentinelsat sometimes fails to download the second image, so an
    alternative command-line download path is provided - choose api=False
    for this.

    Parameters
    -----------
    user : string
           username for esa hub
    passwd : string
             password for hub
    geojsonfile : string
                  AOI polygon of interest
    start_date : string
                 date of beginning of search
    end_date : string
               date of end of search
    cloud : string (optional)
            include a cloud filter in the search
    output_folder : string
                    where you intend to download the imagery
    api : bool (optional)
          True to download via the sentinelsat API, False to shell out to
          the sentinel command line tool

    Returns
    -------
    products_df : pandas.DataFrame of the query result
    products : raw sentinelsat query result
    """
    ##set up your copernicus username and password details, and copernicus download site... BE CAREFUL if you share this script with others though!
    # BUG FIX: the original rebound the boolean ``api`` parameter to the
    # SentinelAPI client, so ``api is True`` below was always False and the
    # API-download branch was unreachable. Keep flag and client separate.
    hub = SentinelAPI(user, passwd)

    footprint = geojson_to_wkt(read_geojson(geojsonfile))
    products = hub.query(footprint, ((start_date, end_date)),
                         platformname="Sentinel-2",
                         cloudcoverpercentage="[0 TO " + cloud +
                         "]")  #,producttype="GRD")
    products_df = hub.to_dataframe(products)
    if api is True and output_folder is not None:
        # BUG FIX: download_all() requires the product collection as its
        # first argument; the original call omitted it.
        hub.download_all(products, directory_path=output_folder)
    else:
        prods = np.arange(len(products))
        # the api was proving flaky whereas the cmd line always works hence
        # this alternate download option
        if output_folder is not None:
            for prod in prods:
                # NOTE(review): integer indexing into ``products`` assumes
                # the old sentinelsat (<0.12) list-of-dicts return type;
                # recent versions return an OrderedDict keyed by uuid -
                # confirm which version this targets.
                sceneID = products[prod]['id']
                cmd = [
                    'sentinel', 'download', '-p', output_folder, user, passwd,
                    sceneID
                ]
                print(sceneID + ' downloading')
                subprocess.call(cmd)

    return products_df, products
# Open a SciHub session (credentials are hard-coded in this script)
api = SentinelAPI('ahui0911', '19940911', 'https://scihub.copernicus.eu/dhus')

footprint = geojson_to_wkt(
    read_geojson(
        r'F:\snappy_InSAR_code\GeoJOSN\stockholm_DSC_dw_scene.geojson'))

# products = api.query(footprint, date=('20170501', '20171031'), platformname = 'Sentinel-1',
#           producttype='SLC', relativeorbitnumber = 102, orbitdirection="DESCENDING")

# Launch Date -> S1A: 2014-04-03, S1B: 2016-04-25
### DSC rorb = 22
# Sentinel-1A SLC scenes on descending relative orbit 22, ordered by sensing
# start time (oldest first); the filename glob restricts results to S1A.
products = api.query(footprint,
                     date=('20150501', '20151031'),
                     platformname='Sentinel-1',
                     producttype='SLC',
                     relativeorbitnumber=22,
                     orbitdirection="DESCENDING",
                     order_by='+beginposition',
                     filename='S1A*')

### ASC rorb = 102
# products = api.query(footprint, date=('20170501', '20171031'), platformname = 'Sentinel-1',
#           producttype='SLC', relativeorbitnumber = 102, orbitdirection="ASCENDING", order_by='+beginposition')

print("Total Number of Searched Products:" + str(len(products.keys())))
# api.download_all(products, savePath)

# NOTE(review): ``savePath`` and the ``os`` import are not defined in this
# excerpt — they must come from earlier in the original script; confirm.
if not os.path.exists(savePath):
    os.mkdir(savePath)

### If a product doesn't exist, then download it.
Beispiel #38
0
def sent2_amazon(user,
                 passwd,
                 geojsonfile,
                 start_date,
                 end_date,
                 output_folder,
                 tile=None,
                 cloud='100'):
    """  
    Query the ESA catalogue then download S2 from AWS with correct renaming
    of stuff. Uses joblib to parallelise multiple files from aws.

    Way quicker than ESA-based download.

    Notes:
    ------------------------

    Credit to sentinelsat for the query aspect of this function, and
    sentinelhub for the AWS aspect.

    Parameters
    ----------

    user : string
           username for esa hub

    passwd : string
             password for hub

    geojsonfile : string
                  AOI polygon of interest

    start_date : string
                 date of beginning of search

    end_date : string
               date of end of search

    output_folder : string
                    where you intend to download the imagery

    tile : string
           S2 tile

    cloud : string (optional)
            include a cloud filter in the search

    Returns
    -------
    products_df : pandas.DataFrame of the query result
    products : raw sentinelsat query result
    """

    # Use sentinelsat to query the ESA catalogue
    api = SentinelAPI(user, passwd)

    footprint = geojson_to_wkt(read_geojson(geojsonfile))
    products = api.query(footprint, ((start_date, end_date)),
                         platformname="Sentinel-2",
                         cloudcoverpercentage="[0 TO " + cloud +
                         "]")  #,producttype="GRD")

    products_df = api.to_dataframe(products)

    # If using an aoi shape this is the option to follow at present until I
    # write a native function
    if tile is None:
        Parallel(n_jobs=-1, verbose=2)(
            delayed(download_safe_format)(i, folder=output_folder)
            for i in products_df.identifier)
    # If the tile id is known then use this - likely handy for oldfmt
    else:
        # One AWS request per ingestion date on the requested tile.
        # (idiom: list comprehension instead of the manual append loop)
        dateList = [
            prod.strftime('%Y-%m-%d')
            for prod in products_df['ingestiondate']
        ]
        Parallel(n_jobs=-1, verbose=2)(
            delayed(download_safe_format)(tile=(tile, i), folder=output_folder)
            for i in dateList)
    return products_df, products
Beispiel #39
0
def test_invalid_query():
    # A raw query string the server cannot parse must raise SentinelAPIError.
    with pytest.raises(SentinelAPIError):
        SentinelAPI(**_api_auth).query(raw="xxx:yyy")
    def _handler(self, request, response):
        """
        WPS process handler: query Sentinel-2 scenes for the requested bbox
        and period, fetch and unzip the tiles, compute the requested index
        (NDVI or BAI) per tile, plot each result, and return the plots as an
        archive plus a single example plot.
        """
        response.update_status("start fetching resource", 10)

        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        # products = [inpt.data for inpt in request.inputs['indices']]

        # only the first requested index is processed
        indice = request.inputs['indices'][0].data

        bbox = []  # order xmin ymin xmax ymax
        bboxStr = request.inputs['BBox'][0].data
        bboxStr = bboxStr.split(',')
        # NOTE(review): the input string is re-ordered here (elements
        # 0,2,1,3), which implies the incoming 'BBox' value is ordered
        # 'xmin,xmax,ymin,ymax' — confirm against the WPS input definition.
        bbox.append(float(bboxStr[0]))
        bbox.append(float(bboxStr[2]))
        bbox.append(float(bboxStr[1]))
        bbox.append(float(bboxStr[3]))

        # default sensing period: the 30 days up to now
        if 'end' in request.inputs:
            end = request.inputs['end'][0].data
            end = dt.combine(end, time(23, 59, 59))
        else:
            end = dt.now()

        if 'start' in request.inputs:
            start = request.inputs['start'][0].data
            start = dt.combine(start, time(0, 0, 0))
        else:
            start = end - timedelta(days=30)

        # inverted period: fall back to the last 30 days
        if (start > end):
            start = dt.now() - timedelta(days=30)
            end = dt.now()
            LOGGER.exception(
                "periode end befor periode start, period is set to the last 30 days from now"
            )

        username = request.inputs['username'][0].data
        password = request.inputs['password'][0].data
        cloud_cover = request.inputs['cloud_cover'][0].data

        api = SentinelAPI(username, password)

        # closed polygon ring built from the bbox corners
        geom = {
            "type":
            "Polygon",
            "coordinates": [[[bbox[0], bbox[1]], [bbox[2], bbox[1]],
                             [bbox[2], bbox[3]], [bbox[0], bbox[3]],
                             [bbox[0], bbox[1]]]]
        }

        footprint = geojson_to_wkt(geom)

        response.update_status("start searching tiles acording query", 15)

        products = api.query(
            footprint,
            date=(start, end),
            platformname='Sentinel-2',
            cloudcoverpercentage=(0, cloud_cover),
            # producttype='SLC',
            # orbitdirection='ASCENDING',
        )

        LOGGER.debug('%s products found' % len(products.keys()))
        # tiles are cached under <cache>/scihub.copernicus
        DIR_cache = cache_path()
        DIR_EO = join(DIR_cache, 'scihub.copernicus')
        if not exists(DIR_EO):
            makedirs(DIR_EO)

        # api.download_all(products)
        # try:
        # with open(filepathes, 'w') as fp:
        #     fp.write('############################################\n')
        #     fp.write('###     Following files are fetched      ###\n')
        #     fp.write('############################################\n')
        #     fp.write('\n')

        # paths of the unzipped tiles to process
        resources = []

        # fetch + unzip each product, skipping anything already cached
        for key in products.keys():
            try:
                filename = products[key]['filename']
                # form = products[key]['format']
                ID = str(products[key]['identifier'])
                file_zip = join(DIR_EO, '%s.zip' % (ID))
                DIR_tile = join(DIR_EO, '%s' % (filename))
                response.update_status("fetch file %s" % ID, 20)
                LOGGER.debug('path: %s' % DIR_tile)
                if exists(file_zip):
                    LOGGER.debug('file %s.zip already fetched' % ID)
                else:
                    try:
                        api.download(key, directory_path=DIR_EO)
                        response.update_status(
                            "***%s sucessfully fetched" % ID, 20)
                        LOGGER.debug('Tile %s fetched' % ID)
                        LOGGER.debug('Files %s fetched ' % ID)
                    except:
                        LOGGER.exception('failed to extract file %s' %
                                         filename)
                if exists(DIR_tile):
                    LOGGER.debug('file %s already unzipped' % filename)
                else:
                    try:
                        # zipfile = join(DIR_EO, '%szip' % (filename)).strip(form)
                        zip_ref = zipfile.ZipFile(file_zip, 'r')
                        zip_ref.extractall(DIR_EO)
                        zip_ref.close()
                        LOGGER.debug('Tile %s unzipped' % ID)
                    except:
                        LOGGER.exception('failed to extract %s ' % file_zip)
                resources.append(DIR_tile)
            except:
                LOGGER.exception('failed to fetch %s' % key)

        # NOTE(review): ``key`` is the variable left over from the loop
        # above, so these fields describe only the *last* product; they are
        # also never used below — confirm whether this is dead code.
        size = float(products[key]['size'].split(' ')[0])
        producttype = products[key]['producttype']
        beginposition = str(products[key]['beginposition'])

        imgs = []
        tiles = []
        # compute the requested index per unzipped tile
        for resource in resources:
            try:
                response.update_status("Calculating %s indices " % (indice),
                                       40)
                if indice == 'NDVI':
                    LOGGER.debug('Calculate NDVI for %s', resource)
                    tile = eodata.get_ndvi(resource)
                    LOGGER.debug('resources BAI calculated')
                if indice == 'BAI':
                    LOGGER.debug('Calculate BAI for %s', resource)
                    tile = eodata.get_bai(resource)
                    LOGGER.debug('resources BAI calculated')
                tiles.append(tile)
            except:
                LOGGER.exception('failed to calculate indice for %s ' %
                                 resource)

        # plot each computed tile to a PNG
        for tile in tiles:
            try:
                LOGGER.debug("Plot tile %s" % tile)
                img = eodata.plot_band(tile,
                                       file_extension='PNG',
                                       colorscheem=indice)
                imgs.append(img)
            except:
                LOGGER.exception("Failed de plot tile %s " % tile)

        from flyingpigeon.utils import archive
        tarf = archive(imgs)

        response.outputs['output_archive'].file = tarf

        # pick the first truthy image as the example plot
        i = next((i for i, x in enumerate(imgs) if x), None)
        if i is None:
            i = "dummy.png"
        # NOTE(review): when no image was produced, ``i`` is the string
        # "dummy.png" and ``imgs[i]`` raises TypeError — this branch likely
        # intended ``response.outputs['output_plot'].file = "dummy.png"``.
        response.outputs['output_plot'].file = imgs[i]

        # from flyingpigeon import visualisation as vs
        #
        # images = vs.concat_images(imgs, orientation='v')

        response.update_status("done", 100)
        return response
Beispiel #41
0
class ApiSession():
    """
    ApiSession handles all connections with the SentinelAPI.

    Credentials are read from ``../ressources/apiKey.txt``; query defaults
    come from the module-level ``query`` configuration dict.
    """
    def __init__(self):
        self.key_file_path = '../ressources/apiKey.txt'
        # downloads land in <cwd>/../downloadedImages
        self.export_directory = (os.path.normpath(
            os.path.join(os.getcwd(), os.pardir, 'downloadedImages')))
        self.user, self.password = self.parsefile()
        # NOTE(review): ``query`` is a module-level dict defined elsewhere in
        # this file — confirm it provides 'url', 'begin', 'end', 'platform'
        # and 'processing'.
        self.api = SentinelAPI(self.user, self.password, query['url'])
        self.platform = 'Sentinel-2'

    def parsefile(self):
        """
        Parse apiKey.txt and return a (username, password) tuple for the
        SentinelAPI.

        Each line is expected to look like ``label,value``: the first line
        holds the username, the second the password.
        """
        info = []
        try:
            # BUG FIX: a missing file raises FileNotFoundError (an OSError),
            # not FileExistsError, so the original handler could never fire;
            # the file handle was also never closed.
            with open(self.key_file_path, 'r') as key_file:
                for line in key_file:
                    info.append(line.strip().split(',')[1])
        except OSError as exception:
            print('Api key file not found, must be in ressources/apiKey.txt')
            print('Raised Error: {}'.format(exception))

        return (info[0], info[1])

    def query(self, footprint):
        """
        Queries the SentinelAPI and returns a geojson containing data
        candidates.
        """
        return self.api.query(footprint,
                              date=(query['begin'], query['end']),
                              platformname=query['platform'],
                              processinglevel=query['processing'])

    def to_geo_df(self, product):
        """
        Returns a GeoDataFrame built from a query result.
        """
        return self.api.to_geodataframe(product)

    def download(self, link, directory):
        """
        Downloads the product identified by *link* into *directory*.
        """
        self.api.download(link, directory_path=directory)

    def query_to_dataframe(self, footprint, output_path, contains=True):
        """
        Saves the queried geopandas to a csv file so that it could be
        used in the future.
        contains: if true will only save the links that fully contain the
            footprint
        """
        catalog = self.to_geo_df(self.query(footprint))
        desired_columns = [
            'summary', 'vegetationpercentage', 'notvegetatedpercentage',
            'waterpercentage', 'unclassifiedpercentage', 'snowicepercentage',
            'cloudcoverpercentage', 'geometry', 'size'
        ]
        filtered_catalog = catalog[desired_columns]
        if contains:
            keep = [
                footprint.within(geometry) for geometry in catalog['geometry']
            ]
            filtered_catalog = filtered_catalog[keep]

        # write through a context manager so the handle is always closed
        with open(output_path, 'w') as output_file:
            output_file.write(filtered_catalog.to_csv())
Beispiel #42
0
import zipfile

from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt

# project-local configuration module holding the SciHub credentials
import cfg
import glob  # NOTE(review): imported but unused in this excerpt

api = SentinelAPI(cfg.copernicus["user"], cfg.copernicus["password"], 'https://scihub.copernicus.eu/dhus')
outdir = '/download_storage/S2_L2A_MSI_ARD/BoyacaImages/'


footprint = geojson_to_wkt(read_geojson('/origin/S2_L2A_MSI_ARD/Search Polygons/BoyacaCentral.geojson'))

# Level-2A Sentinel-2 scenes over the AOI, up to 90 % cloud cover
products = api.query(footprint,
                     date=('20200919', '20201109'),
                     platformname='Sentinel-2',
                     producttype= 'S2MSI2A',
                     cloudcoverpercentage=(0, 90))
# The string below is a no-op documentation literal quoting the accepted
# formats of sentinelsat's ``date`` parameter.
"""
date (tuple of (str or datetime) or str, optional) --
A time interval filter based on the Sensing Start Time of the products. Expects a tuple of (start, end), e.g. (“NOW-1DAY”, “NOW”). The timestamps can be either a Python datetime or a string in one of the following formats:

yyyyMMdd
yyyy-MM-ddThh:mm:ss.SSSZ (ISO-8601)
yyyy-MM-ddThh:mm:ssZ
NOW
NOW-<n>DAY(S) (or HOUR(S), MONTH(S), etc.)
NOW+<n>DAY(S)
yyyy-MM-ddThh:mm:ssZ-<n>DAY(S)
NOW/DAY (or HOUR, MONTH etc.) - rounds the value to the given unit
Alternatively, an already fully formatted string such as “[NOW-1DAY TO NOW]” can be used as well.
"""
Beispiel #43
0
def test_invalid_query():
    # Malformed raw query strings must be rejected with SentinelAPIError.
    hub = SentinelAPI(**_api_auth)
    with pytest.raises(SentinelAPIError) as excinfo:
        hub.query(raw="xxx:yyy")
Beispiel #44
0
import os
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt
mypath = r'C:\Users\mccoy\PycharmProjects\untitled\gz_2010_us_outline_500k.json'
# products = api.query(footprint,
#                      producttype='SLC',
#                      orbitdirection='ASCENDING')
# api.download_all(products)

testAPI = SentinelAPI(user='******', password='******', api_url='https://s5phub.copernicus.eu/dhus')
footprint = geojson_to_wkt(read_geojson(mypath))
#products = testAPI.query(area = footprint, date = "[NOW-20DAYS TO NOW]", platformname='Sentinel-5p')

#Pollution map for the US
products = testAPI.query(area = footprint, date = "[NOW-1DAYS TO NOW]",producttype='L2__NO2___')

# download all results from the search
mypath = testAPI.download_all(products)
downloadedfile = mypath[0][next(iter(mypath[0]))]['path']
dir_path = os.path.dirname(os.path.realpath(__file__))
# NOTE(review): plain string concatenation — confirm 'path' already starts
# with a separator, otherwise use os.path.join.
downloadedfile_full = dir_path + downloadedfile

# GeoJSON FeatureCollection containing footprints and metadata of the scenes
testAPI.to_geojson(products)

# GeoPandas GeoDataFrame with the metadata of the scenes and the footprints as geometries
# BUG FIX: the client in this script is ``testAPI``; the original called the
# undefined name ``api`` here, which raised NameError.
testAPI.to_geodataframe(products)

#Get all data for the whole world
#products = testAPI.query(date = "[NOW-1DAYS TO NOW]")

def query_sentinel_data(cfg, save_json=True):
    """
    Query Copernicus SciHub for the Sentinel products described by *cfg*,
    keep only those not already present in Google Earth Engine, and
    optionally dump the result to json/geojson files.

    Inputs
        cfg: edict-like config providing user/password, start_date/end_date,
            platformname, producttype, roi_url, datafolder, download flags,
            cloudcoverpercentage, ...
        save_json: bool, default=True
            if True, write <datafolder>/outputs/<roi>/<SAT>_<type>_<now>.json
            and a geojson of the product footprints.
    Output
        QueryInfo: edict with 'products', 'results' and 'cfg' sections.

    NOTE(review): relies on an initialized Earth Engine session (``ee``) and
    on helpers (``edict``, ``is_jsonable``, ``placename_to_wkt``) defined
    elsewhere in this file — confirm.
    """
    workpath = Path(os.getcwd())

    # api = SentinelAPI('puzhao', 'kth10044ESA!', 'https://scihub.copernicus.eu/dhus')
    # user, password = "******", "19940911"

    api = SentinelAPI(cfg.user, cfg.password,
                      'https://scihub.copernicus.eu/dhus')

    now = datetime.now().strftime("%Y-%m-%dT%H%M%S")
    today = datetime.today().strftime("%Y-%m-%d")
    # default query window: yesterday .. tomorrow
    if cfg.start_date is None:
        cfg.start_date = (datetime.today() +
                          timedelta(-1)).strftime("%Y-%m-%d")
    if cfg.end_date is None:
        cfg.end_date = (datetime.today() + timedelta(1)).strftime("%Y-%m-%d")
    print("now: ", now)

    cfg.update({
        "roi_url": cfg.roi_url,
        'placename': "British Columbia",
        "query_by": "roi",  # 'place' has problem
        "query_date": today,
        "start_date": cfg.start_date,
        "end_date": cfg.end_date,
        "platformname": cfg.platformname,  # Sentinel-2
        "producttype": cfg.producttype,  # S2MSI1C, S2MSI2A

        # 'relativeorbitnumber': 84,
        # "orbitdirection": "ASCENDING",
        "download_flag": cfg.download_flag,
        "download_one": True,  # download one by one
        "download_all": True,  # download all once
    })

    pprint(cfg)

    Sat_Abb_Dict = {'Sentinel-1': 'S1', 'Sentinel-2': 'S2', 'Sentinel-3': 'S3'}
    SAT = Sat_Abb_Dict[cfg.platformname]

    datafolder = Path(cfg.datafolder)
    savePath = datafolder / "data" / f"{SAT}_{cfg.producttype}"
    if not os.path.exists(savePath): os.makedirs(savePath)

    cfg.update({"sat_folder": str(savePath)})

    # download_one and download_all are mutually exclusive, and both are
    # gated on download_flag
    cfg.download_all = False if cfg.download_one else True
    cfg.download_all = cfg.download_all and cfg.download_flag
    cfg.download_one = cfg.download_one and cfg.download_flag

    # NOTE(review): if query_by is neither "roi" nor "place", ``footprint``
    # and ``roi_name`` stay unbound and the code below raises NameError.
    if cfg.query_by == "roi":
        footprint = geojson_to_wkt(read_geojson(str(workpath / cfg.roi_url)))
        roi_name = os.path.split(cfg.roi_url)[-1].split(".")[0]

    if cfg.query_by == "place":
        footprint = placename_to_wkt(cfg.placename)
        roi_name = cfg.placename.replace(" ", "_")
    # print(BC)

    ### DSC rorb = 22
    if cfg.platformname == "Sentinel-1":
        cfg.checkProperty = "system:index"
        cfg.check_eeImgCol = "COPERNICUS/S1_GRD"

        products = api.query(
            footprint,
            date=(cfg.start_date.replace("-",
                                         ""), cfg.end_date.replace("-", "")),
            platformname=cfg.platformname,
            producttype=cfg.producttype,
            order_by='+beginposition',
        )

    else:  # S2, S3 ...
        cfg.checkProperty = "PRODUCT_ID"
        cfg.check_eeImgCol = "COPERNICUS/S2" if 'S2MSI1C' == cfg.producttype else "COPERNICUS/S2_SR"

        products = api.query(
            footprint,
            date=(cfg.start_date.replace("-",
                                         ""), cfg.end_date.replace("-", "")),
            platformname=cfg.platformname,
            producttype=cfg.producttype,
            order_by='+beginposition',
            cloudcoverpercentage=(0, cfg.cloudcoverpercentage),  # for S2 only
        )

    # print(products['0c05435b-0cd3-45a0-93f4-8c317eb1d558'])
    print("\n\n===========> Sentinel Auto-Query <============")

    products_df = api.to_dataframe(products)
    # print(products_df.keys())
    # print(products_df.index)
    # pprint(products_df[['sensoroperationalmode', 'orbitdirection', 'relativeorbitnumber']])

    products_dict = products_df.transpose().to_dict()

    # keep only json-serializable metadata fields (sampled from the first
    # product)
    products_list = products_df.index.tolist()
    if len(products_list) > 0:
        example_dict = products_dict[products_list[0]]
        property_list = [
            key for key in example_dict.keys()
            if is_jsonable(example_dict[key])
        ]
    # pprint(products_dict.keys())

    # select property for saving to json
    orbit_dict = {'ASCENDING': 'ASC', 'DESCENDING': 'DSC'}
    products_QueryInfo = edict()
    checkImgCol = ee.ImageCollection(f"{cfg.check_eeImgCol}")

    if SAT == "S1":
        sentinel_asset = ee.ImageCollection("users/omegazhangpzh/Sentinel1")
    if SAT == "S2":
        sentinel_asset = ee.ImageCollection("users/omegazhangpzh/Sentinel2")

    # keep only products that are in neither the public GEE collection nor
    # the private asset
    for product_id in products_dict.keys():
        title = products_dict[product_id]['title']
        filtered_size = ee.Number(checkImgCol.filter(ee.Filter.eq(cfg.checkProperty, title)).size())\
                    .add(sentinel_asset.filter(ee.Filter.eq(cfg.checkProperty, title)).size()).getInfo()
        flag = filtered_size > 0
        print(title, flag)

        # flag = False
        if not flag:  # if this product is not available in GEE
            # print(title)
            # print(title, flag.getInfo())
            products_QueryInfo[title] = {
                key: products_dict[product_id][key]
                for key in property_list
            }
            # products_QueryInfo[title]['product_id'] = product_id

            orbit_direction = products_dict[product_id]['orbitdirection']
            orbit_num = products_dict[product_id]['relativeorbitnumber']

            # e.g. "DSC_22"
            products_QueryInfo[title]['orbit_key'] = orbit_dict[
                orbit_direction] + "_" + str(orbit_num)

    QueryInfo = edict()
    QueryInfo["products"] = products_QueryInfo

    QueryInfo["results"] = edict()
    QueryInfo["results"]['total_number'] = len(products_QueryInfo.keys())
    QueryInfo["results"]['products_list'] = sorted(
        list(products_QueryInfo.keys()))
    QueryInfo["results"]['orbKey_list'] = list(
        set([
            products_QueryInfo[product]['orbit_key']
            for product in list(products_QueryInfo.keys())
        ]))

    QueryInfo["cfg"] = cfg

    # roi_name = os.path.split(cfg.roi_url)[-1].split(".")[0]
    jsonPath = datafolder / "outputs" / roi_name
    if not os.path.exists(str(jsonPath)):
        os.makedirs(jsonPath)

    if save_json:
        """ save as json """
        json_url = jsonPath / f"{SAT}_{cfg.producttype}_{now}.json"
        print("\njson_url: " + str(json_url))

        with open(str(json_url), 'w') as fp:
            json.dump(edict(QueryInfo), fp, ensure_ascii=False, indent=4)
        """ save as geojson """
        import geojson
        # NOTE(review): the geojson filename hard-codes the "S1_" prefix
        # even when SAT is S2/S3 — confirm whether this is intended.
        with open(jsonPath / f"S1_{cfg.producttype}_{now}.geojson", 'w') as fp:
            geojson.dump(api.to_geojson(products), fp, indent=4)

    print()
    # print(footprint)
    print("now: ", now)
    print("Total Number of Searched Products:" +
          str(len(QueryInfo["results"]['products_list'])))
    pprint(QueryInfo["results"]['products_list'])

    return QueryInfo
# Center a folium map on the midpoint between the first and last AREA vertices
# NOTE(review): ``AREA``, ``object_name``, ``api`` and ``footprint_geojson``
# are defined earlier in the original notebook/script — confirm.
m = folium.Map([(AREA[0][1] + AREA[len(AREA) - 1][1]) / 2,
                (AREA[0][0] + AREA[len(AREA) - 1][0]) / 2],
               zoom_start=10)

folium.GeoJson(str(object_name) + '.geojson').add_to(m)
m

# To fetch optical Sentinel-2 imagery we specify, besides the "location"
# already given, the following four parameters:
# - target satellite
# - time period
# - data processing level
# - cloud cover percentage
products = api.query(
    footprint_geojson,
    date=('20200601', '20200701'),  # desired acquisition period
    platformname='Sentinel-2',
    processinglevel='Level-2A',
    cloudcoverpercentage=(0, 100))  # cloud cover (0% - 100%)

len(products)

# sort the query result by cloud cover, clearest scene first
products_gdf = api.to_geodataframe(products)
products_gdf_sorted = products_gdf.sort_values(['cloudcoverpercentage'],
                                               ascending=[True])
products_gdf_sorted

products_gdf_sorted.head()

# download the clearest scene
uuid = products_gdf_sorted.iloc[0]["uuid"]
product_title = products_gdf_sorted.iloc[0]["title"]
api.download(uuid)
Beispiel #47
0
class SentinelDownloader(object):
    def __init__(self,
                 user,
                 password,
                 api_url='https://scihub.copernicus.eu/apihub'):

        self._apiname = api_url
        self._user = user
        self._password = password

        # init logger
        root = logging.getLogger()
        root.addHandler(logging.StreamHandler(sys.stderr))
        if self._apiname == 'https://scihub.copernicus.eu/apihub':
            try:
                from sentinelsat import SentinelAPI
            except ImportError as e:
                gs.fatal(
                    _("Module requires sentinelsat library: {}").format(e))
            # connect SciHub via API
            self._api = SentinelAPI(self._user,
                                    self._password,
                                    api_url=self._apiname)
        elif self._apiname == 'USGS_EE':
            try:
                import landsatxplore.api
                from landsatxplore.errors import EarthExplorerError
            except ImportError as e:
                gs.fatal(
                    _("Module requires landsatxplore library: {}").format(e))
            api_login = False
            while api_login is False:
                # avoid login conflict in possible parallel execution
                try:
                    self._api = landsatxplore.api.API(self._user,
                                                      self._password)
                    api_login = True
                except EarthExplorerError as e:
                    time.sleep(1)
        self._products_df_sorted = None

    def filter(self,
               area,
               area_relation,
               clouds=None,
               producttype=None,
               limit=None,
               query=None,
               start=None,
               end=None,
               sortby=None,
               asc=True,
               relativeorbitnumber=None):
        """Query the SciHub catalog and store the sorted result.

        :param area: WKT footprint to search within
        :param area_relation: spatial relation ('Intersects', 'Contains', ...)
        :param clouds: maximum cloud cover percentage (int-convertible)
        :param producttype: product type, e.g. 'S2MSI2A' or 'GRD'
        :param limit: keep only the first *limit* sorted products
        :param query: extra raw query keywords (override built-in args)
        :param start: start date 'YYYY-MM-DD' (default: NOW-60DAYS)
        :param end: end date 'YYYY-MM-DD' (default: NOW)
        :param sortby: list of dataframe columns to sort by
        :param asc: sort ascending when True
        :param relativeorbitnumber: restrict to a relative orbit number

        On success stores a dataframe in ``self._products_df_sorted``;
        leaves it untouched when nothing was found.
        """
        # bug fix: avoid mutable default arguments shared between calls
        if query is None:
            query = {}
        if sortby is None:
            sortby = []
        args = {}
        if clouds:
            args['cloudcoverpercentage'] = (0, int(clouds))
        if relativeorbitnumber:
            args['relativeorbitnumber'] = relativeorbitnumber
            # S2 relative orbits go up to 143, S1 up to 175; bug fix:
            # guard producttype — it may be None here
            if producttype and producttype.startswith('S2') \
                    and int(relativeorbitnumber) > 143:
                gs.warning("This relative orbit number is out of range")
            elif int(relativeorbitnumber) > 175:
                gs.warning("This relative orbit number is out of range")
        if producttype:
            args['producttype'] = producttype
            if producttype.startswith('S2'):
                args['platformname'] = 'Sentinel-2'
            else:
                args['platformname'] = 'Sentinel-1'
        # SciHub accepts 'NOW...' keywords or compact YYYYMMDD dates
        start = 'NOW-60DAYS' if not start else start.replace('-', '')
        end = 'NOW' if not end else end.replace('-', '')
        if query:
            redefined = [key for key in args if key in query]
            if redefined:
                gs.warning(
                    "Query overrides already defined options ({})".format(
                        ','.join(redefined)))
            args.update(query)
        gs.verbose(
            "Query: area={} area_relation={} date=({}, {}) args={}".format(
                area, area_relation, start, end, args))
        products = self._api.query(area=area,
                                   area_relation=area_relation,
                                   date=(start, end),
                                   **args)
        products_df = self._api.to_dataframe(products)
        if len(products_df) < 1:
            gs.message(_('No product found'))
            return

        # sort and limit to first sorted product
        if sortby:
            self._products_df_sorted = products_df.sort_values(
                sortby, ascending=[asc] * len(sortby))
        else:
            self._products_df_sorted = products_df

        if limit:
            self._products_df_sorted = self._products_df_sorted.head(
                int(limit))

        gs.message(
            _('{} Sentinel product(s) found').format(
                len(self._products_df_sorted)))

    def list(self):
        if self._products_df_sorted is None:
            return
        id_kw = ('uuid', 'entity_id')
        identifier_kw = ('identifier', 'display_id')
        cloud_kw = ('cloudcoverpercentage', 'cloud_cover')
        time_kw = ('beginposition', 'acquisition_date')
        kw_idx = 1 if self._apiname == 'USGS_EE' else 0
        for idx in range(len(self._products_df_sorted[id_kw[kw_idx]])):
            if cloud_kw[kw_idx] in self._products_df_sorted:
                ccp = '{0:2.0f}%'.format(
                    float(self._products_df_sorted[cloud_kw[kw_idx]][idx]))
            else:
                ccp = 'cloudcover_NA'

            print_str = '{0} {1}'.format(
                self._products_df_sorted[id_kw[kw_idx]][idx],
                self._products_df_sorted[identifier_kw[kw_idx]][idx])
            if kw_idx == 1:
                time_string = self._products_df_sorted[time_kw[kw_idx]][idx]
            else:
                time_string = self._products_df_sorted[
                    time_kw[kw_idx]][idx].strftime("%Y-%m-%dT%H:%M:%SZ")
            print_str += ' {0} {1}'.format(time_string, ccp)
            if kw_idx == 0:
                print_str += ' {0}'.format(
                    self._products_df_sorted['producttype'][idx])

            print(print_str)

    def download(self, output, sleep=False, maxretry=False):
        """Download all stored products into the *output* directory.

        :param output: target directory (created if missing)
        :param sleep: minutes to wait between retries for offline (LTA)
                      SciHub products; falsy disables retrying
        :param maxretry: maximum number of retries when *sleep* is set
        """
        if self._products_df_sorted is None:
            return

        if not os.path.exists(output):
            os.makedirs(output)
        gs.message(_('Downloading data into <{}>...').format(output))
        if self._apiname == 'USGS_EE':
            self._download_usgs(output)
        else:
            self._download_esa(output, sleep, maxretry)

    def _download_usgs(self, output):
        """Download every USGS Earth Explorer scene and unpack each .zip
        to the "usual" .SAFE directory."""
        from landsatxplore.earthexplorer import EarthExplorer
        from landsatxplore.errors import EarthExplorerError
        from zipfile import ZipFile
        ee_login = False
        while ee_login is False:
            # avoid login conflict in possible parallel execution
            try:
                ee = EarthExplorer(self._user, self._password)
                ee_login = True
            except EarthExplorerError:
                time.sleep(1)
        for idx in range(len(self._products_df_sorted['entity_id'])):
            identifier = self._products_df_sorted['display_id'][idx]
            zip_file = os.path.join(output, '{}.zip'.format(identifier))
            gs.message('Downloading {}...'.format(identifier))
            try:
                ee.download(identifier=identifier,
                            output_dir=output,
                            timeout=600)
            except EarthExplorerError as e:
                gs.fatal(_(e))
            # extract .zip to get "usual" .SAFE
            with ZipFile(zip_file, 'r') as zf:
                safe_name = zf.namelist()[0].split('/')[0]
                outpath = os.path.join(output, safe_name)
                zf.extractall(path=output)
            gs.message(_('Downloaded to <{}>').format(outpath))
            try:
                os.remove(zip_file)
            except Exception as e:
                gs.warning(
                    _('Unable to remove {0}:{1}').format(zip_file, e))
        # bug fix: log out once after all downloads; logging out inside
        # the loop invalidated the session before later scenes downloaded
        ee.logout()

    def _download_esa(self, output, sleep, maxretry):
        """Download SciHub products, optionally retrying offline (LTA)
        products until they come online or the retry budget is spent."""
        for idx in range(len(self._products_df_sorted['uuid'])):
            gs.message('{} -> {}.SAFE'.format(
                self._products_df_sorted['uuid'][idx],
                os.path.join(output,
                             self._products_df_sorted['identifier'][idx])))
            # download
            out = self._api.download(self._products_df_sorted['uuid'][idx],
                                     output)
            if sleep:
                retries = 1
                # bug fix: re-check the fresh 'Online' flag each retry —
                # the old code read it once and always ran out maxretry
                while not out['Online']:
                    # sleep is in minutes so multiply by 60
                    time.sleep(int(sleep) * 60)
                    out = self._api.download(
                        self._products_df_sorted['uuid'][idx], output)
                    retries += 1
                    if retries > maxretry:
                        break

    def save_footprints(self, map_name):
        """Write the footprints of the stored products into a GRASS vector
        map named *map_name* (via a temporary GeoPackage).

        :param map_name: name of the output vector map
        """
        if self._products_df_sorted is None:
            return
        if self._apiname == 'USGS_EE':
            gs.fatal(
                _('USGS Earth Explorer does not support footprint download.'))
        try:
            from osgeo import ogr, osr
        except ImportError as e:
            gs.fatal(
                _("Option <footprints> requires GDAL library: {}").format(e))

        gs.message(_("Writing footprints into <{}>...").format(map_name))
        driver = ogr.GetDriverByName("GPKG")
        tmp_name = gs.tempfile() + '.gpkg'
        data_source = driver.CreateDataSource(tmp_name)

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(4326)

        # features can be polygons or multi-polygons
        layer = data_source.CreateLayer(str(map_name), srs,
                                        ogr.wkbMultiPolygon)

        # attributes
        attrs = OrderedDict([("uuid", ogr.OFTString),
                             ("ingestiondate", ogr.OFTString),
                             ("cloudcoverpercentage", ogr.OFTInteger),
                             ("producttype", ogr.OFTString),
                             ("identifier", ogr.OFTString)])

        # Sentinel-1 data does not have cloudcoverpercentage
        # (fixed: no longer shadows the builtin `type`; set intersection
        # replaces the O(n*m) membership scan)
        prod_types = set(self._products_df_sorted["producttype"])
        if prod_types & {"SLC", "GRD"}:
            del attrs["cloudcoverpercentage"]

        for key, field_type in attrs.items():
            layer.CreateField(ogr.FieldDefn(key, field_type))

        # features
        for idx in range(len(self._products_df_sorted['uuid'])):
            wkt = self._products_df_sorted['footprint'][idx]
            feature = ogr.Feature(layer.GetLayerDefn())
            newgeom = ogr.CreateGeometryFromWkt(wkt)
            # convert polygons to multi-polygons
            if ogr.GT_Flatten(newgeom.GetGeometryType()) == ogr.wkbPolygon:
                multigeom = ogr.Geometry(ogr.wkbMultiPolygon)
                multigeom.AddGeometryDirectly(newgeom)
                feature.SetGeometry(multigeom)
            else:
                feature.SetGeometry(newgeom)
            for key in attrs:
                if key == 'ingestiondate':
                    value = self._products_df_sorted[key][idx].strftime(
                        "%Y-%m-%dT%H:%M:%SZ")
                else:
                    value = self._products_df_sorted[key][idx]
                feature.SetField(key, value)
            layer.CreateFeature(feature)
            feature = None

        # release the data source so the GeoPackage is flushed to disk
        data_source = None

        # coordinates of footprints are in WKT -> fp precision issues
        # -> snap
        gs.run_command('v.import',
                       input=tmp_name,
                       output=map_name,
                       layer=map_name,
                       snap=1e-10,
                       quiet=True)

    def get_products_from_uuid_usgs(self, uuid_list):
        """Fetch USGS metadata for every uuid in *uuid_list* and store the
        result as the products dataframe."""
        scenes = [self._api.metadata(uuid, 'SENTINEL_2A')
                  for uuid in uuid_list]
        self._products_df_sorted = pandas.DataFrame.from_dict(scenes)
        gs.message(
            _('{} Sentinel product(s) found').format(
                len(self._products_df_sorted)))

    def set_uuid(self, uuid_list):
        """Select products directly by uuid.

        For USGS the metadata is fetched in bulk; for SciHub each uuid's
        OData record is translated into the dataframe-like dict of lists
        used by the rest of this class.

        TODO: Find better implementation

        :param uuid_list: uuids of the products to select
        """
        if self._apiname == 'USGS_EE':
            self.get_products_from_uuid_usgs(uuid_list)
            return

        from sentinelsat.sentinel import SentinelAPIError

        # map OData attribute names to the column names used elsewhere
        # in this class; attributes not listed here are dropped
        column_for = {
            'id': 'uuid',
            'Sensing start': 'beginposition',
            'Product type': 'producttype',
            'Cloud cover percentage': 'cloudcoverpercentage',
            'Identifier': 'identifier',
            'Ingestion Date': 'ingestiondate',
            'footprint': 'footprint',
        }
        self._products_df_sorted = {'uuid': []}
        for uuid in uuid_list:
            try:
                odata = self._api.get_product_odata(uuid, full=True)
            except SentinelAPIError as e:
                gs.error('{0}. UUID {1} skipped'.format(e, uuid))
                continue

            for key, value in odata.items():
                column = column_for.get(key)
                if column is None:
                    continue
                self._products_df_sorted.setdefault(column, []).append(value)

    def filter_USGS(self,
                    area,
                    area_relation,
                    clouds=None,
                    producttype=None,
                    limit=None,
                    query=None,
                    start=None,
                    end=None,
                    sortby=None,
                    asc=True,
                    relativeorbitnumber=None):
        """Search USGS Earth Explorer for Sentinel-2 L1C products and
        store the sorted result.

        :param area: WKT 'POLYGON((lon lat, ...))' search footprint
        :param area_relation: must be 'Intersects' (USGS limitation)
        :param clouds: maximum cloud cover percentage
        :param producttype: only 'S2MSI1C' is supported
        :param limit: maximum number of results
        :param query: dict with 'identifier', 'filename' or
                      'usgs_identifier' for a direct product lookup
        :param start: start date 'YYYY-MM-DD'
        :param end: end date 'YYYY-MM-DD'
        :param sortby: list of dataframe columns to sort by (SciHub
                       keywords are translated to USGS ones)
        :param asc: sort ascending when True
        :param relativeorbitnumber: unsupported (fatal error)
        """
        # bug fix: avoid mutable default arguments; also copy sortby so
        # the keyword translation below no longer mutates the caller's list
        if query is None:
            query = {}
        sortby = list(sortby) if sortby else []
        if area_relation != 'Intersects':
            gs.fatal(
                _('USGS Earth Explorer only supports area_relation'
                  ' "Intersects"'))
        if relativeorbitnumber:
            gs.fatal(
                _('USGS Earth Explorer does not support "relativeorbitnumber"'
                  ' option.'))
        if producttype and producttype != 'S2MSI1C':
            gs.fatal(
                _('USGS Earth Explorer only supports producttype S2MSI1C'))
        if query:
            if not any(
                    key in query
                    for key in ['identifier', 'filename', 'usgs_identifier']):
                gs.fatal(
                    _('USGS Earth Explorer only supports query options'
                      ' "filename", "identifier" or "usgs_identifier".'))
            if 'usgs_identifier' in query:
                # get entityId from usgs identifier and directly save results
                usgs_id = query['usgs_identifier']
                check_s2l1c_identifier(usgs_id, source='usgs')
                entity_id = self._api.get_entity_id([usgs_id], 'SENTINEL_2A')
                self.get_products_from_uuid_usgs(entity_id)
                return
            if "filename" in query:
                esa_id = query['filename'].replace('.SAFE', '')
            else:
                esa_id = query['identifier']
            check_s2l1c_identifier(esa_id, source='esa')
            esa_prod_id = esa_id.split('_')[-1]
            utm_tile = esa_id.split('_')[-2]
            acq_date = esa_id.split('_')[2].split('T')[0]
            acq_date_string = '{0}-{1}-{2}'.format(acq_date[:4],
                                                   acq_date[4:6],
                                                   acq_date[6:])
            start_date = end_date = acq_date_string
            # build the USGS style S2-identifier
            # (fixed: utm_tile_base was unbound when the tile had no 'T')
            utm_tile_base = utm_tile[1:] if utm_tile.startswith('T') \
                else utm_tile
            bbox = get_bbox_from_S2_UTMtile(utm_tile_base)
        else:
            # get coordinate pairs from wkt string
            str_1 = 'POLYGON(('
            str_2 = '))'
            coords = area[area.find(str_1) +
                          len(str_1):area.rfind(str_2)].split(',')
            # str.split() tolerates the leading space on all but the
            # first pair, so no padding hack is needed
            lons = [float(pair.split()[0]) for pair in coords]
            lats = [float(pair.split()[1]) for pair in coords]
            bbox = (min(lons), min(lats), max(lons), max(lats))
            start_date = start
            end_date = end
        usgs_args = {
            'dataset': 'SENTINEL_2A',
            'bbox': bbox,
            'start_date': start_date,
            'end_date': end_date
        }
        if clouds:
            usgs_args['max_cloud_cover'] = clouds
        if limit:
            usgs_args['max_results'] = limit
        scenes = self._api.search(**usgs_args)
        self._api.logout()
        if query:
            # keep only scenes on the requested UTM tile
            # (bug fix: the old remove-while-iterating loop skipped items)
            scenes = [scene for scene in scenes
                      if scene['display_id'].split('_')[1] == utm_tile]
            # if two candidates remain, keep the requested product id
            if len(scenes) == 2:
                scenes = [scene for scene in scenes
                          if scene['display_id'].split('_')[-1] == esa_prod_id]
        if len(scenes) < 1:
            gs.message(_('No product found'))
            return
        scenes_df = pandas.DataFrame.from_dict(scenes)
        if sortby:
            # replace sortby keywords with USGS keywords (on the local copy)
            for idx, keyword in enumerate(sortby):
                if keyword == 'cloudcoverpercentage':
                    sortby[idx] = 'cloud_cover'
                    # turn cloudcover to float to make it sortable
                    scenes_df['cloud_cover'] = pandas.to_numeric(
                        scenes_df['cloud_cover'])
                elif keyword == 'ingestiondate':
                    sortby[idx] = 'acquisition_date'
                # sorting by footprint is meaningless; use the id instead
                elif keyword == 'footprint':
                    sortby[idx] = 'display_id'
            self._products_df_sorted = scenes_df.sort_values(sortby,
                                                             ascending=[asc] *
                                                             len(sortby),
                                                             ignore_index=True)
        else:
            self._products_df_sorted = scenes_df
        gs.message(
            _('{} Sentinel product(s) found').format(
                len(self._products_df_sorted)))