import pytest
import requests_mock

from sentinelsat import SentinelAPI, SentinelAPIError


def test_is_online():
    api = SentinelAPI("mock_user", "mock_password")

    uuid = '98ca202b-2155-4181-be88-4358b2cbaaa0'
    invalid_uuid = '98ca202b-2155-4181-be88-xxxxxxxxxxxx'

    request_url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('{}')/Online/$value"

    with requests_mock.mock() as rqst:
        rqst.get(
            request_url.format(uuid),
            text="true", status_code=200
        )
        assert api.is_online(uuid) is True

    with requests_mock.mock() as rqst:
        rqst.get(
            request_url.format(uuid),
            text="false", status_code=200
        )
        assert api.is_online(uuid) is False


    with requests_mock.mock() as rqst:
        rqst.get(
            request_url.format(invalid_uuid),
            text='{{"error":{{"code":null,"message":{{"lang":"en","value":'
                 '"Invalid key ({}) to access Products"}}}}}}'.format(invalid_uuid),
            status_code=200
        )
        with pytest.raises(SentinelAPIError) as excinfo:
            api.is_online(invalid_uuid)
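        # One could additionally check the reported message; a hedged sketch,
        # assuming SentinelAPIError exposes the server message as `msg`:
        # assert invalid_uuid in excinfo.value.msg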
Example #2
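# Note: this example starts mid-function. It assumes that download_api (a
# SentinelAPI instance), site_json, start_date, end_date and download_dir are
# defined earlier, and that os and the sentinelsat helpers read_geojson and
# geojson_to_wkt are imported.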
    footprint = geojson_to_wkt(read_geojson(site_json))
    scenes = download_api.query(footprint,
                                date=(start_date, end_date),
                                productType='GRD',
                                sensoroperationalmode='IW',
                                polarisationmode='VV VH',
                                platformname='Sentinel-1')
    # cloudcoverpercentage=(0, 30)

    scenes_retry = []
    for scene_id in scenes:
        scene = scenes[scene_id]
        print('date: %s; scene: %s' %
              (scene['beginposition'][:10], scene['title']))
        if not download_api.is_online(scene_id):
            if not os.path.isfile('%s/%s.zip' %
                                  (download_dir, scene['title'])):
                scenes_retry.append(scene)
        else:
            try:
                download_api.download(scene_id, directory_path=download_dir)
            except Exception:
                scenes_retry.append(scene)

    print('%i scenes remaining to be requested' % len(scenes_retry))
    problem_scenes = []
    while len(scenes_retry) > 0:
        scenes_offline = []
        for ii, scene in enumerate(scenes_retry):
            print('%i of %i; date: %s; scene: %s' %
                  (ii + 1, len(scenes_retry),
                   scene['beginposition'][:10], scene['title']))
Example #3
import logging
import time

from geojson import Point
from sentinelsat import SentinelAPI, SentinelAPIError, geojson_to_wkt

# config and utils are assumed to be project-local modules that provide the
# Copernicus credentials, retry settings and timestamp helpers used below.
import config
import utils

logger = logging.getLogger(__name__)


class SentinelWrapper:
    def __init__(self):

        logger.info("connect to sentinel API")

        # connection to API for search queries and download requests
        self.api = SentinelAPI(config.copernicusUser, config.copernicusPW,
                               config.copernicusURL)

        logger.info("sentinel API connected")

    def get_sentinel_products(self, lat, lon, date_from, date_to, platform,
                              **kwargs):

        logger.info("start sentinel query")

        # convert geolocation coordinates to wkt format
        footprint = geojson_to_wkt(Point((lon, lat)))

        # prepare parameter for cloud coverage
        if "cloudcoverpercentage" in kwargs:
            kwargs["cloudcoverpercentage"] = (0,
                                              kwargs["cloudcoverpercentage"])

        # search query (retried on server failures)
        result = None
        for attempt in range(1, config.serverFailureRequestRepeats + 1):
            try:
                result = self.api.query(footprint,
                                        date=(date_from, date_to),
                                        platformname=platform,
                                        **kwargs)
                break
            except SentinelAPIError as e:
                print(repr(e))
                if attempt < config.serverFailureRequestRepeats:
                    print(
                        f"Attempt {attempt} failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    logger.info(
                        f"Attempt {attempt} to connect to Sentinel server failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    time.sleep(60 * config.serverFailureRequestDelay)
                else:
                    print("Last attempt failed. Aborting.")
                    logger.info(
                        "Last attempt to connect to Sentinel server failed. Aborting."
                    )

        logger.info("sentinel query complete")

        return result

    # download multiple sentinel products (list of product IDs)
    def download_sentinel_products(self, products):
        for attempt in range(1, config.serverFailureRequestRepeats + 1):
            try:
                logger.info("start downloading sentinel product list")
                self.api.download_all(products, config.bigTilesDir)
                logger.info("download complete")
                break
            except SentinelAPIError as e:
                print(repr(e))
                if attempt < config.serverFailureRequestRepeats:
                    print(
                        f"Attempt {attempt} failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    logger.info(
                        f"Attempt {attempt} to connect to Sentinel server failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    time.sleep(60 * config.serverFailureRequestDelay)
                else:
                    print("Last attempt failed. Aborting.")
                    logger.info(
                        "Last attempt to connect to Sentinel server failed. Aborting."
                    )
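
    # The try/except retry block above is repeated in every method of this class;
    # a small helper along these lines (a sketch, not part of the original code)
    # could factor it out:
    #
    #     def _with_retries(self, action):
    #         for attempt in range(1, config.serverFailureRequestRepeats + 1):
    #             try:
    #                 return action()
    #             except SentinelAPIError as e:
    #                 print(repr(e))
    #                 if attempt < config.serverFailureRequestRepeats:
    #                     time.sleep(60 * config.serverFailureRequestDelay)
    #                 else:
    #                     logger.info("Last attempt to connect to Sentinel server failed. Aborting.")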

    # download sentinel product with certain product ID
    def download_sentinel_product(self, product_id):
        for attempt in range(1, config.serverFailureRequestRepeats + 1):
            try:
                logger.info("start downloading sentinel product")
                product_info = self.api.download(product_id,
                                                 config.bigTilesDir)
                if not product_info["Online"]:
                    logger.info("archived download triggered")
                    return False
                else:
                    # TODO: Download should be checked
                    logger.info("download complete")
                    return True
            except SentinelAPIError as e:
                print(repr(e))
                if attempt < config.serverFailureRequestRepeats:
                    print(
                        f"Attempt {attempt} failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    logger.info(
                        f"Attempt {attempt} to connect to Sentinel server failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    time.sleep(60 * config.serverFailureRequestDelay)
                else:
                    print("Last attempt failed. Aborting.")
                    logger.info(
                        "Last attempt to connect to Sentinel server failed. Aborting."
                    )

    def get_product_data(self, product_id):
        for attempt in range(1, config.serverFailureRequestRepeats + 1):
            try:
                return self.api.get_product_odata(product_id)
            except SentinelAPIError as e:
                print(repr(e))
                if attempt < config.serverFailureRequestRepeats:
                    print(
                        f"Attempt {attempt} failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    logger.info(
                        f"Attempt {attempt} to connect to Sentinel server failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    time.sleep(60 * config.serverFailureRequestDelay)
                else:
                    print("Last attempt failed. Aborting.")
                    logger.info(
                        "Last attempt to connect to Sentinel server failed. Aborting."
                    )

    def ready_for_download(self, product_id):
        for attempt in range(1, config.serverFailureRequestRepeats + 1):
            try:
                return self.api.is_online(product_id)
            except SentinelAPIError as e:
                print(repr(e))
                if attempt < config.serverFailureRequestRepeats:
                    print(
                        f"Attempt {attempt} failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    logger.info(
                        f"Attempt {attempt} to connect to Sentinel server failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    time.sleep(60 * config.serverFailureRequestDelay)
                else:
                    print("Last attempt failed. Aborting.")
                    logger.info(
                        "Last attempt to connect to Sentinel server failed. Aborting."
                    )

    def request_offline_tile(self, last_tile_download_request, product_id):

        # only issue a new request if the last one lies outside the configured request delay
        last_request = utils.minutes_since_last_download_request()
        if last_request is None or last_request > config.copernicusRequestDelay:

            if last_tile_download_request is None or \
                    utils.minutes_since_timestamp(last_tile_download_request) > config.copernicusRepeatRequestAfterMin:

                for attempt in range(1,
                                     config.serverFailureRequestRepeats + 1):
                    try:
                        # HTTP-Code 202: Accepted for retrieval
                        # TODO: handle other HTTP-Codes as well...
                        product_info = self.api.get_product_odata(product_id)
                        if self.api._trigger_offline_retrieval(
                                product_info["url"]) == 202:
                            return True
                        else:
                            return False
                    except SentinelAPIError as e:
                        print(repr(e))
                        if attempt < config.serverFailureRequestRepeats:
                            print(
                                f"Attempt {attempt} failed. Next try in {config.serverFailureRequestDelay} minutes."
                            )
                            logger.info(
                                f"Attempt {attempt} to connect to Sentinel server failed. Next try in {config.serverFailureRequestDelay} minutes."
                            )
                            time.sleep(60 * config.serverFailureRequestDelay)
                        else:
                            print("Last attempt failed. Aborting.")
                            logger.info(
                                "Last attempt to connect to Sentinel server failed. Aborting."
                            )

        else:

            return False
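
# A minimal usage sketch for the wrapper above (hypothetical coordinates and dates;
# it assumes the project-local config module provides the credentials, URL and
# retry settings referenced in the class):
#
#     wrapper = SentinelWrapper()
#     products = wrapper.get_sentinel_products(48.14, 11.57, "20210101", "20210131",
#                                              "Sentinel-2", cloudcoverpercentage=30)
#     for product_id in products:
#         if wrapper.ready_for_download(product_id):
#             wrapper.download_sentinel_product(product_id)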
Example #4
import logging
import random

import geojson
from geojson import FeatureCollection, dump
from sentinelsat import SentinelAPI, geojson_to_wkt, read_geojson
from shapely import affinity, geometry
from shapely.geometry import Polygon
from shapely.ops import unary_union

# TrainingConfig and update_config are assumed to be provided by the surrounding
# project's configuration module.


def main(args):
    """ Runs dataLayer processing scripts to turn raw dataLayer from (../raw) into
        cleaned dataLayer ready to be analyzed (saved in ../processed).
    """
    ## Talk to Rune about how dataLayer is handled and whether it should be part of the "big" project.
    ## set number_tiles:1764
    config = TrainingConfig()
    config = update_config(args, config)
    logger = logging.getLogger(__name__)
    logger.info('making final dataLayer set from raw dataLayer')
    userTuple = [['pandagud', 'damp4ever'], ['pandagud2', 'damp4ever'],
                 ['pandagud3', 'damp4ever'], ['au524478', 'Palantir1234']]
    current_user = random.choice(userTuple)

    api = SentinelAPI(current_user[0], current_user[1],
                      'https://scihub.copernicus.eu/dhus')

    # search by polygon, time, and SciHub query keywords
    path = r"C:\Users\panda\Downloads\LC80290292014132LGN00.geojson"
    footprint = geojson_to_wkt(read_geojson(path))
    products = api.query(area=footprint,
                         date=('20210101', '20210105'),
                         platformname='Sentinel-2',
                         order_by='+ingestiondate',
                         limit=1)
    areas = api.to_geodataframe(products)
    geojson = api.to_geojson(products)
    api.download_all(products, directory_path=r'C:\Users\panda\Sat_paper\Alfa')

    products = api.query(area=footprint,
                         date=('20210401', '20210430'),
                         producttype='GRD',
                         platformname='Sentinel-1',
                         sensoroperationalmode='IW',
                         polarisationmode='VV VH',
                         order_by='ingestiondate')
    firstproduct = next(iter(products))
    online_product = ''
    for i in products:
        is_online = api.is_online(products.get(i).get('uuid'))
        if is_online:
            online_product = i
            break
    delete_list = []
    for i in products:
        if i != online_product:
            delete_list.append(i)
    for i in delete_list:
        del products[i]

    ground_geojsons = read_geojson(path)
    products_geojsons = api.to_geojson(products)

    ground_polygon = ground_geojsons.get('features')[0].get('geometry').get(
        'coordinates')
    ground_polygon = geometry.Polygon(ground_polygon[0][0])
    import numpy as np
    titles = []
    ids = []
    for item in products_geojsons.get('features'):
        id = item.get('properties').get('id')
        item = item.get('properties').get('title')
        item = (item[17:25] + item[48:55])
        titles.append(item)
        ids.append([item, id])
    unique = list(set(titles))
    union_list = []
    for i, element in enumerate(unique):
        local_polygon = Polygon()
        for j in range(len(titles)):
            if titles[j] == element:
                item = products_geojsons.get('features')[j]
                item = item.get('geometry').get('coordinates')
                item = geometry.Polygon(item[0][0])
                item = affinity.scale(item, xfact=1.01, yfact=1.01)
                polygons = [item, local_polygon]
                local_polygons = unary_union(polygons)
                # carry the accumulated union forward so all matching footprints are merged
                local_polygon = local_polygons
        union_list.append([local_polygons, element])
    for index, element in enumerate(union_list):
        wkt = element[0].wkt
        if ground_polygon.within(element[0]):
            found_id = element[1]
            break
    for i in ids:
        if found_id != i[0]:
            del products[i[1]]
    area_list = []
    for index, item in enumerate(products_geojsons.get('features')):
        item = item.get('geometry').get('coordinates')
        item = geometry.Polygon(item[0][0])
        local_intersection = item.intersection(ground_polygon)
        local_intersection = [local_intersection.area, index]
        area_list.append(local_intersection)
    area_list.sort(reverse=True)
    for index in range(len(area_list)):
        item = products_geojsons.get('features')[area_list[index][1]]
        id = item.get('properties').get('id')
        item = item.get('geometry').get('coordinates')
        item = geometry.Polygon(item[0][0])
        if item.intersects(ground_polygon):
            local_intersection = ground_polygon.intersection(item)
            print(str(ground_polygon.area))
            print(str(local_intersection.area))
            # ground_polygon = ground_polygon.difference(local_intersection)
            ground_polygon = (ground_polygon.symmetric_difference(
                local_intersection)).difference(local_intersection)
        else:
            del products[id]
    import datetime
    from datetime import timedelta
    S2_geojson = read_geojson(path)

    start_S1_date = S2_geojson.get('features')[0].get('properties').get(
        'ingestiondate')
    start_S1_date = start_S1_date.split('T')[0]
    start_S1_date = datetime.datetime.strptime(start_S1_date,
                                               '%Y-%m-%d').date()
    ## New end date for S1
    end_S1_date = start_S1_date + timedelta(days=7)
    start_S1_date = start_S1_date - timedelta(days=7)
    start_S1_date_str = str(start_S1_date).replace('-', '')
    end_S1_date_str = str(end_S1_date).replace('-', '')

    ## COMBINE FOOTPRINT
    geom_in_geojson = []
    geom_in_geojson.append(
        geojson.Feature(geometry=ground_polygon,
                        properties={"MissingData": "Test"}))
    feature_collection = FeatureCollection(geom_in_geojson)
    pathToFile = r'C:\Users\panda\Sat_paper\missing.geojson'
    with open(pathToFile, 'w') as f:
        dump(feature_collection, f)

    print("Done")