Code Example #1
File: test_mod.py Project: valgur/sentinelsat
def test_scihub_unresponsive():
    timeout_connect = 6
    timeout_read = 6.6
    timeout = (timeout_connect, timeout_read)

    api = SentinelAPI("mock_user", "mock_password", timeout=timeout)

    with requests_mock.mock() as rqst:
        rqst.request(requests_mock.ANY, requests_mock.ANY, exc=requests.exceptions.ConnectTimeout)
        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.query(**_small_query)

        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.download('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.download_all(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])

    with requests_mock.mock() as rqst:
        rqst.request(requests_mock.ANY, requests_mock.ANY, exc=requests.exceptions.ReadTimeout)
        with pytest.raises(requests.exceptions.ReadTimeout):
            api.query(**_small_query)

        with pytest.raises(requests.exceptions.ReadTimeout):
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ReadTimeout):
            api.download('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ReadTimeout):
            api.download_all(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])
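
The timeout passed to SentinelAPI above follows the requests convention: a (connect, read) tuple, which is why the test exercises ConnectTimeout and ReadTimeout separately. A minimal standalone sketch of the same distinction, using an arbitrary placeholder URL:

import requests

# (connect timeout, read timeout) in seconds, the same tuple form passed to SentinelAPI above
timeout = (6, 6.6)
try:
    requests.get("https://example.com", timeout=timeout)
except requests.exceptions.ConnectTimeout:
    print("could not establish a connection within 6 s")
except requests.exceptions.ReadTimeout:
    print("connected, but no data arrived within 6.6 s")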
Code Example #2
def test_scihub_unresponsive():
    api = SentinelAPI("mock_user", "mock_password")

    with requests_mock.mock() as rqst:
        rqst.request(requests_mock.ANY,
                     requests_mock.ANY,
                     exc=requests.exceptions.ConnectTimeout)
        with pytest.raises(requests.exceptions.Timeout):
            api.query(**_small_query)

        with pytest.raises(requests.exceptions.Timeout):
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.Timeout):
            api.download('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.Timeout):
            api.download_all(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])
Code Example #3
def test_SentinelAPI_wrong_credentials(small_query):
    api = SentinelAPI("wrong_user", "wrong_password")

    @contextmanager
    def assert_exception():
        with pytest.raises(UnauthorizedError) as excinfo:
            yield
        assert excinfo.value.response.status_code == 401
        assert "Invalid user name or password" in excinfo.value.msg

    with assert_exception():
        api.query(**small_query)
    with assert_exception():
        api.get_product_odata("8df46c9e-a20c-43db-a19a-4240c2ed3b8b")
    with assert_exception():
        api.download("8df46c9e-a20c-43db-a19a-4240c2ed3b8b")
    with assert_exception():
        api.download_all(["8df46c9e-a20c-43db-a19a-4240c2ed3b8b"])
Code Example #4
def _get_quicklook(api_kwargs, cassette):
    api = SentinelAPI(**api_kwargs)
    ids = [
        "6b126ea4-fe27-440c-9a5c-686f386b7291",
        "1a9401bc-6986-4707-b38d-f6c29ca58c00",
        "54e6c4ad-6f4e-4fbf-b163-1719f60bfaeb",
    ]
    with cassette:
        odata = [api.get_product_odata(x) for x in ids]
    return odata
Code Example #5
def test_SentinelAPI_wrong_credentials():
    api = SentinelAPI(
        "wrong_user",
        "wrong_password"
    )
    with pytest.raises(SentinelAPIError) as excinfo:
        api.query(**_small_query)
    assert excinfo.value.response.status_code == 401

    with pytest.raises(SentinelAPIError) as excinfo:
        api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
    assert excinfo.value.response.status_code == 401

    with pytest.raises(SentinelAPIError) as excinfo:
        api.download('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
    assert excinfo.value.response.status_code == 401

    with pytest.raises(SentinelAPIError) as excinfo:
        api.download_all(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])
    assert excinfo.value.response.status_code == 401
Code Example #6
File: test_mod.py Project: NiklasKeck/sentinelsat
def test_SentinelAPI_wrong_credentials():
    api = SentinelAPI(
        "wrong_user",
        "wrong_password"
    )
    with pytest.raises(SentinelAPIError) as excinfo:
        api.query(**_small_query)
    assert excinfo.value.response.status_code == 401

    with pytest.raises(SentinelAPIError) as excinfo:
        api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
    assert excinfo.value.response.status_code == 401

    with pytest.raises(SentinelAPIError) as excinfo:
        api.download('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
    assert excinfo.value.response.status_code == 401

    with pytest.raises(SentinelAPIError) as excinfo:
        api.download_all(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])
    assert excinfo.value.response.status_code == 401
Code Example #7
def test_get_product_odata_short():
    api = SentinelAPI(**_api_auth)

    expected_short = {
        '8df46c9e-a20c-43db-a19a-4240c2ed3b8b': {
            'id': '8df46c9e-a20c-43db-a19a-4240c2ed3b8b',
            'size': 143549851,
            'md5': 'D5E4DF5C38C6E97BF7E7BD540AB21C05',
            'url': "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/$value",
            'date': datetime(2015, 11, 21, 10, 3, 56, 675000),
            'footprint': 'POLYGON((-63.852531 -5.880887,-67.495872 -5.075419,-67.066071 -3.084356,-63.430576 -3.880541,'
                         '-63.852531 -5.880887))',
            'title': 'S1A_EW_GRDM_1SDV_20151121T100356_20151121T100429_008701_00C622_A0EC'
        },
        '44517f66-9845-4792-a988-b5ae6e81fd3e': {
            'id': '44517f66-9845-4792-a988-b5ae6e81fd3e',
            'date': datetime(2015, 12, 27, 14, 22, 29),
            'footprint': 'POLYGON((-58.80274769505742 -4.565257232533263,-58.80535376268811 -5.513960396525286,'
                         '-57.90315169909761 -5.515947033626909,-57.903151791669515 -5.516014389089381,-57.85874693129081 -5.516044812342758,'
                         '-57.814323596961835 -5.516142631941845,-57.81432351345917 -5.516075248310466,-57.00018056571297 -5.516633044843839,'
                         '-57.000180565731384 -5.516700066819259,-56.95603179187787 -5.51666329264377,-56.91188395837315 -5.516693539799448,'
                         '-56.91188396736038 -5.51662651925904,-56.097209386295305 -5.515947927683427,-56.09720929423562 -5.516014937246069,'
                         '-56.053056977999596 -5.5159111504805916,-56.00892491028779 -5.515874390220655,-56.00892501130261 -5.515807411549814,'
                         '-55.10621586418906 -5.513685455771881,-55.108821882251775 -4.6092845892233,-54.20840287327946 -4.606372862374043,'
                         '-54.21169990975238 -3.658594390979672,-54.214267703869346 -2.710949551849636,-55.15704255065496 -2.7127451087194463,'
                         '-56.0563616875051 -2.71378646425769,-56.9561852630143 -2.7141556791285275,-57.8999998009875 -2.713837142510183,'
                         '-57.90079161941062 -3.6180222056692726,-58.800616247288836 -3.616721351843382,-58.80274769505742 -4.565257232533263))',
            'md5': '48C5648C2644CE07207B3C943DEDEB44',
            'size': 5854429622,
            'title': 'S2A_OPER_PRD_MSIL1C_PDMC_20151228T112523_R110_V20151227T142229_20151227T142229',
            'url': "https://scihub.copernicus.eu/apihub/odata/v1/Products('44517f66-9845-4792-a988-b5ae6e81fd3e')/$value"
        }
    }
    for id, expected in expected_short.items():
        ret = api.get_product_odata(id)
        assert set(ret) == set(expected)
        for k in ret:
            assert ret[k] == expected[k]
Code Example #8
def test_scihub_unresponsive(small_query):
    timeout_connect = 6
    timeout_read = 6.6
    timeout = (timeout_connect, timeout_read)

    api = SentinelAPI("mock_user", "mock_password", timeout=timeout)

    with requests_mock.mock() as rqst:
        rqst.request(requests_mock.ANY,
                     requests_mock.ANY,
                     exc=requests.exceptions.ConnectTimeout)
        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.query(**small_query)

        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.get_product_odata("8df46c9e-a20c-43db-a19a-4240c2ed3b8b")

        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.download("8df46c9e-a20c-43db-a19a-4240c2ed3b8b")

        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.download_all(["8df46c9e-a20c-43db-a19a-4240c2ed3b8b"])

    with requests_mock.mock() as rqst:
        rqst.request(requests_mock.ANY,
                     requests_mock.ANY,
                     exc=requests.exceptions.ReadTimeout)
        with pytest.raises(requests.exceptions.ReadTimeout):
            api.query(**small_query)

        with pytest.raises(requests.exceptions.ReadTimeout):
            api.get_product_odata("8df46c9e-a20c-43db-a19a-4240c2ed3b8b")

        with pytest.raises(requests.exceptions.ReadTimeout):
            api.download("8df46c9e-a20c-43db-a19a-4240c2ed3b8b")

        with pytest.raises(requests.exceptions.ReadTimeout):
            api.download_all(["8df46c9e-a20c-43db-a19a-4240c2ed3b8b"])
Code Example #9
def _get_smallest(api_kwargs, cassette, online, n=3):
    time_range = ("NOW-1MONTH", None) if online else (None, "20170101")
    odatas = []
    with cassette:
        api = SentinelAPI(**api_kwargs)
        products = api.query(date=time_range, size="/.+KB/", limit=15)
        for uuid in products:
            odata = api.get_product_odata(uuid)
            if odata["Online"] == online:
                odatas.append(odata)
                if len(odatas) == n:
                    break
    assert len(odatas) == n
    return odatas
Code Example #10
File: test_mod.py Project: mortezaomidi/sentinelsat
def test_get_product_odata_scihub_down():
    api = SentinelAPI("mock_user", "mock_password")

    with requests_mock.mock() as rqst:
        rqst.get(
            "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')?$format=json",
            text="Mock SciHub is Down",
            status_code=503)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        rqst.get(
            "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/?$format=json",
            text="Mock SciHub is Down",
            status_code=200)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert excinfo.value.msg == "Mock SciHub is Down"

        # Test with a real server response
        rqst.get(
            "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')?$format=json",
            text=textwrap.dedent("""\
            <!doctype html>
            <title>The Sentinels Scientific Data Hub</title>
            <link href='https://fonts.googleapis.com/css?family=Open+Sans' rel='stylesheet' type='text/css'>
            <style>
            body { text-align: center; padding: 125px; background: #fff;}
            h1 { font-size: 50px; }
            body { font: 20px 'Open Sans',Helvetica, sans-serif; color: #333; }
            article { display: block; text-align: left; width: 820px; margin: 0 auto; }
            a { color: #0062a4; text-decoration: none; font-size: 26px }
            a:hover { color: #1b99da; text-decoration: none; }
            </style>

            <article>
            <img alt="" src="/datahub.png" style="float: left;margin: 20px;">
            <h1>The Sentinels Scientific Data Hub will be back soon!</h1>
            <div style="margin-left: 145px;">
            <p>
            Sorry for the inconvenience,<br/>
            we're performing some maintenance at the moment.<br/>
            </p>
            <!--<p><a href="https://scihub.copernicus.eu/news/News00098">https://scihub.copernicus.eu/news/News00098</a></p>-->
            <p>
            We'll be back online shortly!
            </p>
            </div>
            </article>
            """),
            status_code=502)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert "The Sentinels Scientific Data Hub will be back soon!" in excinfo.value.msg
Code Example #11
File: test_mod.py Project: ibamacsr/sentinelsat
def test_get_product_odata_scihub_down():
    api = SentinelAPI("mock_user", "mock_password")

    with requests_mock.mock() as rqst:
        rqst.get(
            "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')?$format=json",
            text="Mock SciHub is Down", status_code=503
        )
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        rqst.get(
            "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/?$format=json",
            text="Mock SciHub is Down", status_code=200
        )
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert excinfo.value.msg == "Mock SciHub is Down"

        # Test with a real server response
        rqst.get(
            "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')?$format=json",
            text=textwrap.dedent("""\
            <!doctype html>
            <title>The Sentinels Scientific Data Hub</title>
            <link href='https://fonts.googleapis.com/css?family=Open+Sans' rel='stylesheet' type='text/css'>
            <style>
            body { text-align: center; padding: 125px; background: #fff;}
            h1 { font-size: 50px; }
            body { font: 20px 'Open Sans',Helvetica, sans-serif; color: #333; }
            article { display: block; text-align: left; width: 820px; margin: 0 auto; }
            a { color: #0062a4; text-decoration: none; font-size: 26px }
            a:hover { color: #1b99da; text-decoration: none; }
            </style>

            <article>
            <img alt="" src="/datahub.png" style="float: left;margin: 20px;">
            <h1>The Sentinels Scientific Data Hub will be back soon!</h1>
            <div style="margin-left: 145px;">
            <p>
            Sorry for the inconvenience,<br/>
            we're performing some maintenance at the moment.<br/>
            </p>
            <!--<p><a href="https://scihub.copernicus.eu/news/News00098">https://scihub.copernicus.eu/news/News00098</a></p>-->
            <p>
            We'll be back online shortly!
            </p>
            </div>
            </article>
            """),
            status_code=502)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert "The Sentinels Scientific Data Hub will be back soon!" in excinfo.value.msg
Code Example #12
def smallest_archived_products(api_kwargs, vcr):
    n = 3
    api = SentinelAPI(**api_kwargs)
    # Find some small and old products, expecting them to be archived due to age.
    # Can't use the OData API for this as we do for the online products
    # because the ContentLength value there does not match the true product size.
    odatas = []
    with vcr.use_cassette("smallest_archived_products"):
        products = api.query(date=(None, "20170101"), size="/.+KB/", limit=10)
        for uuid in products:
            odata = api.get_product_odata(uuid)
            if not odata["Online"]:
                odatas.append(odata)
                if len(odatas) == n:
                    break
        assert len(odatas) == n
    return odatas
Code Example #13
File: test_mod.py Project: valgur/sentinelsat
def test_get_product_odata_scihub_down(read_fixture_file):
    api = SentinelAPI("mock_user", "mock_password")

    request_url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')?$format=json"

    with requests_mock.mock() as rqst:
        rqst.get(
            request_url,
            text="Mock SciHub is Down", status_code=503
        )
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert excinfo.value.msg == "Mock SciHub is Down"

        rqst.get(
            request_url,
            text='{"error":{"code":null,"message":{"lang":"en","value":'
                 '"No Products found with key \'8df46c9e-a20c-43db-a19a-4240c2ed3b8b\' "}}}',
            status_code=500
        )
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert excinfo.value.msg == "No Products found with key \'8df46c9e-a20c-43db-a19a-4240c2ed3b8b\' "

        rqst.get(
            request_url,
            text="Mock SciHub is Down", status_code=200
        )
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert excinfo.value.msg == "Mock SciHub is Down"

        # Test with a real "server under maintenance" response
        rqst.get(
            request_url,
            text=read_fixture_file('server_maintenance.html'),
            status_code=502)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert "The Sentinels Scientific Data Hub will be back soon!" in excinfo.value.msg
Code Example #14
def test_get_product_odata_scihub_down(read_fixture_file):
    api = SentinelAPI("mock_user", "mock_password")

    request_url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')?$format=json"

    with requests_mock.mock() as rqst:
        rqst.get(
            request_url,
            text="Mock SciHub is Down", status_code=503
        )
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert excinfo.value.msg == "Mock SciHub is Down"

        rqst.get(
            request_url,
            text='{"error":{"code":null,"message":{"lang":"en","value":'
                 '"No Products found with key \'8df46c9e-a20c-43db-a19a-4240c2ed3b8b\' "}}}',
            status_code=500
        )
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert excinfo.value.msg == "No Products found with key \'8df46c9e-a20c-43db-a19a-4240c2ed3b8b\' "

        rqst.get(
            request_url,
            text="Mock SciHub is Down", status_code=200
        )
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert excinfo.value.msg == "Mock SciHub is Down"

        # Test with a real "server under maintenance" response
        rqst.get(
            request_url,
            text=read_fixture_file('server_maintenance.html'),
            status_code=502)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
        assert "The Sentinels Scientific Data Hub will be back soon!" in excinfo.value.msg
Code Example #15
File: test_mod.py Project: ibamacsr/sentinelsat
def test_get_product_odata_short():
    api = SentinelAPI(**_api_auth)

    expected_short = {
        '8df46c9e-a20c-43db-a19a-4240c2ed3b8b': {
            'id': '8df46c9e-a20c-43db-a19a-4240c2ed3b8b',
            'size': 143549851,
            'md5': 'D5E4DF5C38C6E97BF7E7BD540AB21C05',
            'url': "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/$value",
            'date': datetime(2015, 11, 21, 10, 3, 56, 675000),
            'footprint': 'POLYGON((-63.852531 -5.880887,-67.495872 -5.075419,-67.066071 -3.084356,-63.430576 -3.880541,'
                         '-63.852531 -5.880887))',
            'title': 'S1A_EW_GRDM_1SDV_20151121T100356_20151121T100429_008701_00C622_A0EC'
        },
        '44517f66-9845-4792-a988-b5ae6e81fd3e': {
            'id': '44517f66-9845-4792-a988-b5ae6e81fd3e',
            'date': datetime(2015, 12, 27, 14, 22, 29),
            'footprint': 'POLYGON((-58.80274769505742 -4.565257232533263,-58.80535376268811 -5.513960396525286,'
                         '-57.90315169909761 -5.515947033626909,-57.903151791669515 -5.516014389089381,-57.85874693129081 -5.516044812342758,'
                         '-57.814323596961835 -5.516142631941845,-57.81432351345917 -5.516075248310466,-57.00018056571297 -5.516633044843839,'
                         '-57.000180565731384 -5.516700066819259,-56.95603179187787 -5.51666329264377,-56.91188395837315 -5.516693539799448,'
                         '-56.91188396736038 -5.51662651925904,-56.097209386295305 -5.515947927683427,-56.09720929423562 -5.516014937246069,'
                         '-56.053056977999596 -5.5159111504805916,-56.00892491028779 -5.515874390220655,-56.00892501130261 -5.515807411549814,'
                         '-55.10621586418906 -5.513685455771881,-55.108821882251775 -4.6092845892233,-54.20840287327946 -4.606372862374043,'
                         '-54.21169990975238 -3.658594390979672,-54.214267703869346 -2.710949551849636,-55.15704255065496 -2.7127451087194463,'
                         '-56.0563616875051 -2.71378646425769,-56.9561852630143 -2.7141556791285275,-57.8999998009875 -2.713837142510183,'
                         '-57.90079161941062 -3.6180222056692726,-58.800616247288836 -3.616721351843382,-58.80274769505742 -4.565257232533263))',
            'md5': '48C5648C2644CE07207B3C943DEDEB44',
            'size': 5854429622,
            'title': 'S2A_OPER_PRD_MSIL1C_PDMC_20151228T112523_R110_V20151227T142229_20151227T142229',
            'url': "https://scihub.copernicus.eu/apihub/odata/v1/Products('44517f66-9845-4792-a988-b5ae6e81fd3e')/$value"
        }
    }
    for id, expected in expected_short.items():
        ret = api.get_product_odata(id)
        assert set(ret) == set(expected)
        for k in ret:
            assert ret[k] == expected[k]
Code Example #16
File: test_mod.py Project: ibamacsr/sentinelsat
def test_get_product_odata_full():
    api = SentinelAPI(**_api_auth)

    expected_full = {
        '8df46c9e-a20c-43db-a19a-4240c2ed3b8b': {
            'id': '8df46c9e-a20c-43db-a19a-4240c2ed3b8b',
            'title': 'S1A_EW_GRDM_1SDV_20151121T100356_20151121T100429_008701_00C622_A0EC',
            'size': 143549851,
            'md5': 'D5E4DF5C38C6E97BF7E7BD540AB21C05',
            'date': datetime(2015, 11, 21, 10, 3, 56, 675000),
            'footprint': 'POLYGON((-63.852531 -5.880887,-67.495872 -5.075419,-67.066071 -3.084356,-63.430576 -3.880541,-63.852531 -5.880887))',
            'url': "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/$value",
            'Acquisition Type': 'NOMINAL',
            'Carrier rocket': 'Soyuz',
            'Cycle number': 64,
            'Date': datetime(2015, 11, 21, 10, 3, 56, 675000),
            'Filename': 'S1A_EW_GRDM_1SDV_20151121T100356_20151121T100429_008701_00C622_A0EC.SAFE',
            'Footprint': '<gml:Polygon srsName="http://www.opengis.net/gml/srs/epsg.xml#4326" xmlns:gml="http://www.opengis.net/gml">\n   <gml:outerBoundaryIs>\n      <gml:LinearRing>\n         <gml:coordinates>-5.880887,-63.852531 -5.075419,-67.495872 -3.084356,-67.066071 -3.880541,-63.430576 -5.880887,-63.852531</gml:coordinates>\n      </gml:LinearRing>\n   </gml:outerBoundaryIs>\n</gml:Polygon>',
            'Format': 'SAFE',
            'Identifier': 'S1A_EW_GRDM_1SDV_20151121T100356_20151121T100429_008701_00C622_A0EC',
            'Ingestion Date': datetime(2015, 11, 21, 13, 22, 4, 992000),
            'Instrument': 'SAR-C',
            'Instrument abbreviation': 'SAR-C SAR',
            'Instrument description': '<a target="_blank" href="https://sentinel.esa.int/web/sentinel/missions/sentinel-1">https://sentinel.esa.int/web/sentinel/missions/sentinel-1</a>',
            'Instrument description text': 'The SAR Antenna Subsystem (SAS) is developed and build by AstriumGmbH. It is a large foldable planar phased array antenna, which isformed by a centre panel and two antenna side wings. In deployedconfiguration the antenna has an overall aperture of 12.3 x 0.84 m.The antenna provides a fast electronic scanning capability inazimuth and elevation and is based on low loss and highly stablewaveguide radiators build in carbon fibre technology, which arealready successfully used by the TerraSAR-X radar imaging mission.The SAR Electronic Subsystem (SES) is developed and build byAstrium Ltd. It provides all radar control, IF/ RF signalgeneration and receive data handling functions for the SARInstrument. The fully redundant SES is based on a channelisedarchitecture with one transmit and two receive chains, providing amodular approach to the generation and reception of wide-bandsignals and the handling of multi-polarisation modes. One keyfeature is the implementation of the Flexible Dynamic BlockAdaptive Quantisation (FD-BAQ) data compression concept, whichallows an efficient use of on-board storage resources and minimisesdownlink times.',
            'Instrument mode': 'EW',
            'Instrument name': 'Synthetic Aperture Radar (C-band)',
            'Instrument swath': 'EW',
            'JTS footprint': 'POLYGON ((-63.852531 -5.880887,-67.495872 -5.075419,-67.066071 -3.084356,-63.430576 -3.880541,-63.852531 -5.880887))',
            'Launch date': 'April 3rd, 2014',
            'Mission datatake id': 50722,
            'Mission type': 'Earth observation',
            'Mode': 'EW',
            'NSSDC identifier': '0000-000A',
            'Operator': 'European Space Agency',
            'Orbit number (start)': 8701,
            'Orbit number (stop)': 8701,
            'Pass direction': 'DESCENDING',
            'Phase identifier': 1,
            'Polarisation': 'VV VH',
            'Product class': 'S',
            'Product class description': 'SAR Standard L1 Product',
            'Product composition': 'Slice',
            'Product level': 'L1',
            'Product type': 'GRD',
            'Relative orbit (start)': 54,
            'Relative orbit (stop)': 54,
            'Resolution': 'Medium',
            'Satellite': 'Sentinel-1',
            'Satellite description': '<a target="_blank" href="https://sentinel.esa.int/web/sentinel/missions/sentinel-1">https://sentinel.esa.int/web/sentinel/missions/sentinel-1</a>',
            'Satellite name': 'Sentinel-1',
            'Satellite number': 'A',
            'Sensing start': datetime(2015, 11, 21, 10, 3, 56, 675000),
            'Sensing stop': datetime(2015, 11, 21, 10, 4, 29, 714000),
            'Size': '223.88 MB',
            'Slice number': 1,
            'Start relative orbit number': 54,
            'Status': 'ARCHIVED',
            'Stop relative orbit number': 54,
            'Timeliness Category': 'Fast-24h'
        },
        '44517f66-9845-4792-a988-b5ae6e81fd3e': {
            'id': '44517f66-9845-4792-a988-b5ae6e81fd3e',
            'title': 'S2A_OPER_PRD_MSIL1C_PDMC_20151228T112523_R110_V20151227T142229_20151227T142229',
            'size': 5854429622,
            'md5': '48C5648C2644CE07207B3C943DEDEB44',
            'date': datetime(2015, 12, 27, 14, 22, 29),
            'footprint': 'POLYGON((-58.80274769505742 -4.565257232533263,-58.80535376268811 -5.513960396525286,-57.90315169909761 -5.515947033626909,-57.903151791669515 -5.516014389089381,-57.85874693129081 -5.516044812342758,-57.814323596961835 -5.516142631941845,-57.81432351345917 -5.516075248310466,-57.00018056571297 -5.516633044843839,-57.000180565731384 -5.516700066819259,-56.95603179187787 -5.51666329264377,-56.91188395837315 -5.516693539799448,-56.91188396736038 -5.51662651925904,-56.097209386295305 -5.515947927683427,-56.09720929423562 -5.516014937246069,-56.053056977999596 -5.5159111504805916,-56.00892491028779 -5.515874390220655,-56.00892501130261 -5.515807411549814,-55.10621586418906 -5.513685455771881,-55.108821882251775 -4.6092845892233,-54.20840287327946 -4.606372862374043,-54.21169990975238 -3.658594390979672,-54.214267703869346 -2.710949551849636,-55.15704255065496 -2.7127451087194463,-56.0563616875051 -2.71378646425769,-56.9561852630143 -2.7141556791285275,-57.8999998009875 -2.713837142510183,-57.90079161941062 -3.6180222056692726,-58.800616247288836 -3.616721351843382,-58.80274769505742 -4.565257232533263))',
            'url': "https://scihub.copernicus.eu/apihub/odata/v1/Products('44517f66-9845-4792-a988-b5ae6e81fd3e')/$value",
            'Cloud cover percentage': 18.153846153846153,
            'Date': datetime(2015, 12, 27, 14, 22, 29),
            'Degraded MSI data percentage': 0,
            'Degraded ancillary data percentage': 0,
            'Filename': 'S2A_OPER_PRD_MSIL1C_PDMC_20151228T112523_R110_V20151227T142229_20151227T142229.SAFE',
            'Footprint': '<gml:Polygon srsName="http://www.opengis.net/gml/srs/epsg.xml#4326" xmlns:gml="http://www.opengis.net/gml">\n   <gml:outerBoundaryIs>\n      <gml:LinearRing>\n         <gml:coordinates>-4.565257232533263,-58.80274769505742 -5.513960396525286,-58.80535376268811 -5.515947033626909,-57.90315169909761 -5.516014389089381,-57.903151791669515 -5.516044812342758,-57.85874693129081 -5.516142631941845,-57.814323596961835 -5.516075248310466,-57.81432351345917 -5.516633044843839,-57.00018056571297 -5.516700066819259,-57.000180565731384 -5.51666329264377,-56.95603179187787 -5.516693539799448,-56.91188395837315 -5.51662651925904,-56.91188396736038 -5.515947927683427,-56.097209386295305 -5.516014937246069,-56.09720929423562 -5.5159111504805916,-56.053056977999596 -5.515874390220655,-56.00892491028779 -5.515807411549814,-56.00892501130261 -5.513685455771881,-55.10621586418906 -4.6092845892233,-55.108821882251775 -4.606372862374043,-54.20840287327946 -3.658594390979672,-54.21169990975238 -2.710949551849636,-54.214267703869346 -2.7127451087194463,-55.15704255065496 -2.71378646425769,-56.0563616875051 -2.7141556791285275,-56.9561852630143 -2.713837142510183,-57.8999998009875 -3.6180222056692726,-57.90079161941062 -3.616721351843382,-58.800616247288836 -4.565257232533263,-58.80274769505742</gml:coordinates>\n      </gml:LinearRing>\n   </gml:outerBoundaryIs>\n</gml:Polygon>',
            'Format': 'SAFE',
            'Format correctness': 'PASSED',
            'General quality': 'PASSED',
            'Generation time': datetime(2015, 12, 28, 11, 25, 23, 357),
            'Geometric quality': 'PASSED',
            'Identifier': 'S2A_OPER_PRD_MSIL1C_PDMC_20151228T112523_R110_V20151227T142229_20151227T142229',
            'Ingestion Date': datetime(2015, 12, 28, 10, 57, 13, 725000),
            'Instrument': 'MSI',
            'Instrument abbreviation': 'MSI',
            'Instrument mode': 'INS-NOBS',
            'Instrument name': 'Multi-Spectral Instrument',
            'JTS footprint': 'POLYGON ((-58.80274769505742 -4.565257232533263,-58.80535376268811 -5.513960396525286,-57.90315169909761 -5.515947033626909,-57.903151791669515 -5.516014389089381,-57.85874693129081 -5.516044812342758,-57.814323596961835 -5.516142631941845,-57.81432351345917 -5.516075248310466,-57.00018056571297 -5.516633044843839,-57.000180565731384 -5.516700066819259,-56.95603179187787 -5.51666329264377,-56.91188395837315 -5.516693539799448,-56.91188396736038 -5.51662651925904,-56.097209386295305 -5.515947927683427,-56.09720929423562 -5.516014937246069,-56.053056977999596 -5.5159111504805916,-56.00892491028779 -5.515874390220655,-56.00892501130261 -5.515807411549814,-55.10621586418906 -5.513685455771881,-55.108821882251775 -4.6092845892233,-54.20840287327946 -4.606372862374043,-54.21169990975238 -3.658594390979672,-54.214267703869346 -2.710949551849636,-55.15704255065496 -2.7127451087194463,-56.0563616875051 -2.71378646425769,-56.9561852630143 -2.7141556791285275,-57.8999998009875 -2.713837142510183,-57.90079161941062 -3.6180222056692726,-58.800616247288836 -3.616721351843382,-58.80274769505742 -4.565257232533263))',
            'Mission datatake id': 'GS2A_20151227T140932_002681_N02.01',
            'NSSDC identifier': '2015-000A',
            'Orbit number (start)': 2681,
            'Pass direction': 'DESCENDING',
            'Platform serial identifier': 'Sentinel-2A',
            'Processing baseline': 2.01,
            'Processing level': 'Level-1C',
            'Product type': 'S2MSI1C',
            'Radiometric quality': 'PASSED',
            'Relative orbit (start)': 110,
            'Satellite': 'Sentinel-2',
            'Satellite name': 'Sentinel-2',
            'Satellite number': 'A',
            'Sensing start': datetime(2015, 12, 27, 14, 22, 29),
            'Sensing stop': datetime(2015, 12, 27, 14, 22, 29),
            'Sensor quality': 'PASSED',
            'Size': '5.50 GB'
        }
    }
    for id, expected in expected_full.items():
        ret = api.get_product_odata(id, full=True)
        assert set(ret) == set(expected)
        for k in ret:
            assert ret[k] == expected[k]
Code Example #17
    def badana(self):
        global kappa
        kappa=1
        self.deneme="marker string showing we entered badana()"
        print(self.deneme)
        print(self.adana)
        api = SentinelAPI('flavves', 'password', 'https://scihub.copernicus.eu/dhus')
        footprint = geojson_to_wkt(read_geojson('media/map.geojson'))
        print(footprint)
        self.products = api.query(footprint,date=('20191219', date(2019, 12, 29)),platformname='Sentinel-2')
        products_df = api.to_dataframe(self.products)
        print("seems to be working")
        self.products_df_sorted = products_df.sort_values(['cloudcoverpercentage', 'ingestiondate'], ascending=[True, True])
        self.products_df_sorted = self.products_df_sorted.head(1)



        self.df=self.products_df_sorted
        self.NotDefteriKaydi = self.df.values.tolist()
        self.str_denemesi=str(self.NotDefteriKaydi)

        self.Kaydetmeye_basla=list(self.str_denemesi.split(","))
        self.yerler=[0,7,8,9,12,14,18,19,20]
        self.isimler=["File name:","Satellite name","File size","Acquisition date","Orbit number","Cloud percentage","Vegetation percentage","Water percentage","Non-vegetation percentage"]
        self.i=0
        with open("media/books/txt/deneme.txt", "w") as self.dosya:
            for self.sira in self.yerler:   
                print(self.isimler[self.i]+":"+self.Kaydetmeye_basla[self.sira])
                self.yaz=(self.isimler[self.i]+":"+self.Kaydetmeye_basla[self.sira])
                self.i=self.i+1
                self.dosya.write(self.yaz)
                self.dosya.write("\n")
        



        print(self.products_df_sorted)
        print("download started")
        # blocked this out because it downloads a ~1 GB archive
        #api.download_all(self.products_df_sorted.index)
        print("download finished")
        self.veri_cekme=self.products_df_sorted.index
        self.veri_cekme1=self.veri_cekme[0]

        """
        The archive name needed for extraction is the title of the product we downloaded.

        """
        self.arsiv_adi=api.get_product_odata(self.veri_cekme1)
        self.arsiv_adi=self.arsiv_adi["title"]
        self.arsiv_adi=str(self.arsiv_adi)
        print(self.arsiv_adi)
                
        
        self.a = Archive(self.arsiv_adi+'.zip')
        self.a.extract()
        self.img_data_klasor_ismi=os.listdir((self.arsiv_adi+".SAFE"+'/GRANULE'))
        self.img_data_klasor_ismi=self.img_data_klasor_ismi[0]
        self.img_data_klasor_ismi=str(self.img_data_klasor_ismi)
        self.dosya_yer_=(self.arsiv_adi+".SAFE"+'/GRANULE/'+self.img_data_klasor_ismi+'/IMG_DATA')
        self.resim_isim=os.listdir(self.dosya_yer_)
        print(self.dosya_yer_)

        """
        Now let's work with the images using rasterio.

        """

        # os.listdir gave the IMG_DATA contents; check whether resolution subfolders exist
        if any(r in self.resim_isim for r in ("R10m", "R20m", "R60m")):
            self.dosya_yer_=(self.arsiv_adi+".SAFE"+'/GRANULE/'+self.img_data_klasor_ismi+'/IMG_DATA/R60m')
            self.resim_isim=os.listdir(self.dosya_yer_)

            self.jp2ler = [self.resim_isim[2],self.resim_isim[3]]
            self.bands = []
            
            # this reads our jp2 files
            
            for self.jp2 in self.jp2ler:
                with rasterio.open(self.dosya_yer_+"/"+self.jp2) as self.f:
                    self.bands.append(self.f.read(1))
            
            # the images are split into their individual bands
            
            self.band_red=self.bands[0]
            self.band_nir=self.bands[1]
            print("band values computed")
            print(self.bands[0],self.bands[1])
        else:
        
            self.jp2ler = [self.resim_isim[2],self.resim_isim[3]]
            self.bands = []
            
            # this reads our jp2 files
            
            for self.jp2 in self.jp2ler:
                with rasterio.open(self.dosya_yer_+"/"+self.jp2) as f:
                    self.bands.append(f.read(1))
            
            # the images are split into their individual bands
            self.band_red=self.bands[0]
            self.band_nir=self.bands[1]
            print("band values computed")
            print(self.bands[0],self.bands[1])
                        
        # Compute using the classic NDVI equation
        print("computing NDVI")
        np.seterr(divide='ignore', invalid='ignore')

        # Calculate NDVI. This is the equation at the top of this guide expressed in code
        self.ndvi = (self.band_nir.astype(float) - self.band_red.astype(float)) / (self.band_nir + self.band_red)
        # doing this analysis for water; note that it overwrites the NDVI computed above
        self.ndvi=(self.band_red.astype(float) - self.band_nir.astype(float)) / (self.band_red + self.band_nir)
        print("NDVI value ranges")
        print(np.nanmin(self.ndvi), np.nanmax(self.ndvi))


        

        

        # Let's look at our image: we have it separated into its bands.
        # We'll draw a plot, so the libraries needed for that are imported.

        # As is well known, NDVI is classified into values between 1 and -1.
        # We want to show these values with colors.
        # By assigning the numeric values to different color spectra we end up with a colorized NDVI image.
        #
        # We set a midpoint and colorize to the left and right of it; I'll share the color spectrum below.

        class RenkNormalizasyonu(colors.Normalize):
        
            def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
                self.midpoint = midpoint
                colors.Normalize.__init__(self, vmin, vmax, clip)

            def __call__(self, value, clip=None):

                x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
                return np.ma.masked_array(np.interp(value, x, y), np.isnan(value))

        self.min=np.nanmin(self.ndvi)
        self.max=np.nanmax(self.ndvi)
        self.mid=0.1
        print("finally done")
        print(self.min,self.max)

                
        self.fig = plt.figure(figsize=(20,10))
        self.ax = self.fig.add_subplot(111)

        self.cmap = plt.cm.RdYlGn 

        self.cax = self.ax.imshow(self.ndvi, cmap=self.cmap, clim=(self.min, self.max), norm=RenkNormalizasyonu(midpoint=self.mid,vmin=self.min, vmax=self.max))

        self.ax.axis('off')
        self.ax.set_title('NDVI image', fontsize=18, fontweight='bold')

        self.cbar = self.fig.colorbar(self.cax, orientation='horizontal', shrink=0.65)
        # normally it should save like the commented line below, but for now use the hard-coded path
        #self.fig_kaydet="resimler/"+self.resim_isim[2]+".tif"
        self.fig_kaydet="media/books/covers/denemeresmi.png"
        self.fig.savefig(self.fig_kaydet, dpi=200, bbox_inches='tight', pad_inches=0.7)
        self.fig_kaydet_tif="media/books/covers/denemeresmi.tif"
        self.fig.savefig(self.fig_kaydet_tif, dpi=200, bbox_inches='tight', pad_inches=0.7)
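
For reference, here is a minimal sketch of the NDVI step this method performs, stripped of the UI bookkeeping. It assumes two single-band JP2 rasters; the file paths are hypothetical placeholders, not the paths used above.

import numpy as np
import rasterio

# read the red and NIR bands (hypothetical file paths)
with rasterio.open("IMG_DATA/R60m/B04_60m.jp2") as f:
    band_red = f.read(1).astype(float)
with rasterio.open("IMG_DATA/R60m/B08_60m.jp2") as f:
    band_nir = f.read(1).astype(float)

# silence 0/0 warnings at nodata pixels, then apply the classic NDVI equation
np.seterr(divide='ignore', invalid='ignore')
ndvi = (band_nir - band_red) / (band_nir + band_red)
print(np.nanmin(ndvi), np.nanmax(ndvi))  # NDVI values fall within [-1, 1]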
Code Example #18
File: Sen1download.py Project: Simonhong111/SIF
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt

api = SentinelAPI('', '', "https://scihub.copernicus.eu/dhus")
footprint = geojson_to_wkt(read_geojson(r'D:\Sentinel-2Data\Hubei.json'))
print(footprint)

products = api.query(footprint,
                     producttype='SLC',
                     date=("20180601", "20180615"),
                     orbitdirection='ASCENDING',
                     platformname='Sentinel-1')
print(len(products))

for prod in products:
    print(api.get_product_odata(prod))

# api.download_all(products,directory_path="D:\Sentinel-2Data")
# api.download_all(products)
Code Example #19
class SentinelDownloader(object):
    def __init__(self,
                 user,
                 password,
                 api_url='https://scihub.copernicus.eu/dhus'):
        try:
            from sentinelsat import SentinelAPI
        except ImportError as e:
            gs.fatal(_("Module requires sentinelsat library: {}").format(e))
        try:
            import pandas
        except ImportError as e:
            gs.fatal(_("Module requires pandas library: {}").format(e))

        # init logger
        root = logging.getLogger()
        root.addHandler(logging.StreamHandler(sys.stderr))

        # connect SciHub via API
        self._api = SentinelAPI(user, password, api_url=api_url)

        self._products_df_sorted = None

    def filter(self,
               area,
               area_relation,
               clouds=None,
               producttype=None,
               limit=None,
               query={},
               start=None,
               end=None,
               sortby=[],
               asc=True):
        args = {}
        if clouds:
            args['cloudcoverpercentage'] = (0, int(clouds))
        if producttype:
            args['producttype'] = producttype
            if producttype.startswith('S2'):
                args['platformname'] = 'Sentinel-2'
            else:
                args['platformname'] = 'Sentinel-1'
        if not start:
            start = 'NOW-60DAYS'
        else:
            start = start.replace('-', '')
        if not end:
            end = 'NOW'
        else:
            end = end.replace('-', '')
        if query:
            redefined = [
                value for value in args.keys() if value in query.keys()
            ]
            if redefined:
                gs.warning(
                    "Query overrides already defined options ({})".format(
                        ','.join(redefined)))
            args.update(query)
        gs.verbose(
            "Query: area={} area_relation={} date=({}, {}) args={}".format(
                area, area_relation, start, end, args))
        products = self._api.query(area=area,
                                   area_relation=area_relation,
                                   date=(start, end),
                                   **args)
        products_df = self._api.to_dataframe(products)
        if len(products_df) < 1:
            gs.message(_('No product found'))
            return

        # sort and limit to first sorted product
        if sortby:
            self._products_df_sorted = products_df.sort_values(
                sortby, ascending=[asc] * len(sortby))
        else:
            self._products_df_sorted = products_df

        if limit:
            self._products_df_sorted = self._products_df_sorted.head(
                int(limit))

        gs.message(
            _('{} Sentinel product(s) found').format(
                len(self._products_df_sorted)))

    def list(self):
        if self._products_df_sorted is None:
            return

        for idx in range(len(self._products_df_sorted['uuid'])):
            if 'cloudcoverpercentage' in self._products_df_sorted:
                ccp = '{0:2.0f}%'.format(
                    self._products_df_sorted['cloudcoverpercentage'][idx])
            else:
                ccp = 'cloudcover_NA'

            print('{0} {1} {2} {3} {4}'.format(
                self._products_df_sorted['uuid'][idx],
                self._products_df_sorted['identifier'][idx],
                self._products_df_sorted['beginposition'][idx].strftime(
                    "%Y-%m-%dT%H:%M:%SZ"),
                ccp,
                self._products_df_sorted['producttype'][idx],
            ))

    def download(self, output):
        if self._products_df_sorted is None:
            return

        if not os.path.exists(output):
            os.makedirs(output)
        gs.message(_('Downloading data into <{}>...').format(output))
        for idx in range(len(self._products_df_sorted['uuid'])):
            gs.message('{} -> {}.SAFE'.format(
                self._products_df_sorted['uuid'][idx],
                os.path.join(output,
                             self._products_df_sorted['identifier'][idx])))
            # download
            self._api.download(self._products_df_sorted['uuid'][idx], output)

    def save_footprints(self, map_name):
        if self._products_df_sorted is None:
            return

        try:
            from osgeo import ogr, osr
        except ImportError as e:
            gs.fatal(
                _("Option <footprints> requires GDAL library: {}").format(e))

        gs.message(_("Writing footprints into <{}>...").format(map_name))
        driver = ogr.GetDriverByName("GPKG")
        tmp_name = gs.tempfile() + '.gpkg'
        data_source = driver.CreateDataSource(tmp_name)

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(4326)

        # features can be polygons or multi-polygons
        layer = data_source.CreateLayer(str(map_name), srs,
                                        ogr.wkbMultiPolygon)

        # attributes
        attrs = OrderedDict([("uuid", ogr.OFTString),
                             ("ingestiondate", ogr.OFTString),
                             ("cloudcoverpercentage", ogr.OFTInteger),
                             ("producttype", ogr.OFTString),
                             ("identifier", ogr.OFTString)])
        for key in attrs.keys():
            field = ogr.FieldDefn(key, attrs[key])
            layer.CreateField(field)

        # features
        for idx in range(len(self._products_df_sorted['uuid'])):
            wkt = self._products_df_sorted['footprint'][idx]
            feature = ogr.Feature(layer.GetLayerDefn())
            newgeom = ogr.CreateGeometryFromWkt(wkt)
            # convert polygons to multi-polygons
            newgeomtype = ogr.GT_Flatten(newgeom.GetGeometryType())
            if newgeomtype == ogr.wkbPolygon:
                multigeom = ogr.Geometry(ogr.wkbMultiPolygon)
                multigeom.AddGeometryDirectly(newgeom)
                feature.SetGeometry(multigeom)
            else:
                feature.SetGeometry(newgeom)
            for key in attrs.keys():
                if key == 'ingestiondate':
                    value = self._products_df_sorted[key][idx].strftime(
                        "%Y-%m-%dT%H:%M:%SZ")
                else:
                    value = self._products_df_sorted[key][idx]
                feature.SetField(key, value)
            layer.CreateFeature(feature)
            feature = None

        data_source = None

        # coordinates of footprints are in WKT -> fp precision issues
        # -> snap
        gs.run_command('v.import',
                       input=tmp_name,
                       output=map_name,
                       layer=map_name,
                       snap=1e-10,
                       quiet=True)

    def set_uuid(self, uuid_list):
        """Set products by uuid.

        TODO: Find better implementation

        :param uuid_list: list of product uuids to download
        """
        from sentinelsat.sentinel import SentinelAPIError

        self._products_df_sorted = {'uuid': []}
        for uuid in uuid_list:
            try:
                odata = self._api.get_product_odata(uuid, full=True)
            except SentinelAPIError as e:
                gs.error('{0}. UUID {1} skipped'.format(e, uuid))
                continue

            for k, v in odata.items():
                if k == 'id':
                    k = 'uuid'
                elif k == 'Sensing start':
                    k = 'beginposition'
                elif k == 'Product type':
                    k = 'producttype'
                elif k == 'Cloud cover percentage':
                    k = 'cloudcoverpercentage'
                elif k == 'Identifier':
                    k = 'identifier'
                elif k == 'Ingestion Date':
                    k = 'ingestiondate'
                elif k == 'footprint':
                    pass
                else:
                    continue
                if k not in self._products_df_sorted:
                    self._products_df_sorted[k] = []
                self._products_df_sorted[k].append(v)
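
A brief usage sketch of this class follows; the credentials, the WKT area, and the output directory are placeholders, and it simply chains the methods defined above.

# hypothetical usage of SentinelDownloader (placeholder credentials and area)
downloader = SentinelDownloader('myuser', 'mypassword')
downloader.filter(area='POLYGON((8 45, 9 45, 9 46, 8 46, 8 45))',
                  area_relation='Intersects',
                  producttype='S2MSI2A',
                  clouds=30,
                  sortby=['cloudcoverpercentage'],
                  limit=1)
downloader.list()                       # prints uuid, identifier, date, cloud cover, product type
downloader.download(output='/tmp/s2')   # downloads the selected product(s)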
Code Example #20
def main(dwndir, csvpath, logpath, apipath):
    # Configure file for logging
    # ==========================
    logging.basicConfig(
        filename=logpath,
        format="%(asctime)s - %(module)s - %(levelname)s - %(message)s",
        level=logging.INFO)
    with open(logpath, "a") as f:
        loghead = f"\nStarting a new session\n{datetime.now()}\n" + 26 * "=" + "\n"
        f.write(loghead)
    print("Started script download_LTA.py")
    logging.info("Started script download_LTA.py")

    # Read password file
    # ==================
    try:
        with open(apipath) as f:
            (usrnam, psswrd) = f.readline().split(" ")
            if psswrd.endswith("\n"):
                psswrd = psswrd[:-1]
    except IOError:
        logging.info("Error reading the password file!")
        sys.exit("Error reading the password file!")

    # Connect to API using <username> and <password>
    # ==============================================
    print("Connecting to SciHub API...")
    api = SentinelAPI(usrnam, psswrd, "https://scihub.copernicus.eu/dhus")

    # Read CSV file
    # =============
    try:
        dwnfil = pd.read_csv(csvpath)
        print(f"Import CSV file: {basename(csvpath)}")
        logging.info(f"Import CSV file: {basename(csvpath)}")
    except IOError:
        logging.info("Error reading the CSV file!")
        sys.exit("Error reading the CSV file!")

    # Download files from the CSV list
    # ================================
    max_attempts = 10
    checksum = True
    product_ids = list(dwnfil['uuid'])
    # product_example = "672e5131-c79d-4500-b825-9dabf40662e3"
    print(f"Found {len(product_ids)} products in CSV list.\n")
    logging.info(f"Found {len(product_ids)} products in CSV list.\n")

    # Main loop for download
    # ======================
    for i, product_id in enumerate(product_ids):
        if not dwnfil['downloaded'].get(i):
            current_file = api.get_product_odata(product_id)
            print(f"     Next file: {current_file['title']}")
            print(f"     File uuid: {product_id}")
            logging.info(f"     Next file: {current_file['title']}")
            logging.info(f"     File uuid: {product_id}")

            # If product isn't online (it is in LTA), wait and re-check every 5 min
            waiting = 0
            while not current_file['Online']:
                if waiting == 0:
                    print("File is offline, waiting 5 min...")
                    logging.info("File is offline, waiting 5 min...")
                sleep(5 * 60)
                waiting += 5
                # refresh the product's Online status from the server
                current_file = api.get_product_odata(product_id)
                if not current_file['Online']:
                    print(f"{waiting} min has passed, file still offline...")
                    logging.info(f"{waiting} min has passed, file still offline...")

            # When file goes online proceed with download
            for attempt_num in range(max_attempts):
                try:
                    api.download(product_id, dwndir, checksum)
                    # Update CSV file
                    dwnfil.at[i, 'downloaded'] = True
                    dwnfil.to_csv(csvpath, index=False)
                    logging.info("CSV file updated\n")
                    break
                except (KeyboardInterrupt, SystemExit):
                    raise
                except InvalidChecksumError as e:
                    logging.info(
                        f"Invalid checksum. The downloaded file for '{product_id}' is corrupted."
                    )
                    logging.error(e)
                except Exception as e:
                    logging.info(
                        f"There was an error downloading {product_id}")
                    logging.error(e)
            else:
                logging.info(f"    ****  File {product_id} was not Online!\n")
        else:
            logging.info(
                f"SKIP!  File {product_id} has already been downloaded.\n")

    # End message
    # ============
    print("---------  Session finished  ---------")
    logging.info(f"---------  Session {dwndir} finished  ---------\n")
    logging.shutdown()
Code Example #21
def classification(footprint,o_footprint,start,end):
    # set up API and login info
    api = SentinelAPI('mom_sentinel2', 'sentinel2Download!', 'https://scihub.copernicus.eu/apihub/')
    # search data within limits such as date range and cloud cover, using the given footprint.
    products = api.query(footprint, date=(start,end), platformname='Sentinel-2', producttype='S2MSI2A', cloudcoverpercentage=(0,30))
    product_name = []
    date = start
    # download searched data to local
    for product in products:
        product_info = api.get_product_odata(product)
        product_name.append(product_info['title'])
        api.download(product)
    product_name.reverse()
    # use a terminal query to translate B2, B3, B4, and B8 into one multiband tif file.
    for file_name in product_name:
        query = 'gdal_pansharpen.py -h'
        info = os.popen(query)
        d = info.read()
        d = d.split('SUBDATASET_1_NAME=')
        d = d[1].split('\n')
        q = 'gdal_translate ' + d[0] + ' '+file_name+'.tif'
        info = os.popen(q)
        d = info.read()

    minX, maxY, maxX, minY = get_extent(product_name[0])
    if len(product_name)> 1:
        for fn in product_name[1:]:
            minx, maxy, maxx, miny = get_extent(fn)
            minX = min(minX, minx)
            maxY = max(maxY, maxy)
            maxX = max(maxX, maxx)
            minY = min(minY, miny)

    in_ds = gdal.Open(product_name[0])
    gt = in_ds.GetGeoTransform()
    rows = int(int(maxX - minX) / abs(gt[5]))
    cols = int(int(maxY - minY) / gt[1])

    driver = gdal.GetDriverByName("GTiff")
    out_ds = driver.Create(date+'union.tif', rows, cols, 4, gdal.GDT_Byte)
    out_ds.SetProjection(in_ds.GetProjection())

    gt = list(in_ds.GetGeoTransform())
    gt[0], gt[3] = minX, maxY
    out_ds.SetGeoTransform(gt)

    for fn in product_name:
        in_ds = gdal.Open(fn)
        trans = gdal.Transformer(in_ds, out_ds, [])
        success, xyz = trans.TransformPoint(False, 0, 0)
        x, y, z = map(int, xyz)
        # read all four bands and write each into the mosaic at the transformed offset
        data = in_ds.ReadAsArray()
        for i in range(4):
            out_ds.GetRasterBand(i+1).WriteArray(data[i], x, y)
    del in_ds, out_ds

    src_path = date+'union.tif'
    src = rio.open(src_path)
    shpdata = GeoDataFrame.from_file('../shapefile/Polygon.shp')
    out_shpdata = shpdata.copy()
    shpdata=shpdata.to_crs(src.crs)

    features = shpdata.geometry[0].__geo_interface__
    out_image, out_transform = rio.mask.mask(src, [features], crop=True, nodata=src.nodata)
    out_meta = src.meta.copy()
    out_meta.update({"driver": "GTiff",
                     "height": out_image.shape[1],
                     "width": out_image.shape[2],
                     "transform": out_transform})
    band_mask = rio.open('../geo_tiff/'+date+'cutted.tif', "w", **out_meta)
    band_mask.write(out_image)
    #model = create_model()
    model = load_model('../saved_model/MD_model.txt')

    # calculate NDVI & NDWI bands for each file
    dataset=gdal.Open('../geo_tiff/'+date+'cutted.tif')
    im_proj,im_geotrans,im_data = read_img(dataset)
    ndvi = get_ndvi(im_data)
    ndwi = get_ndwi(im_data)
    c,h,w = im_data.shape
    new_data = np.zeros((6,h,w))
    for i in range(4):
        temp = np.max(im_data[i])
        new_data[i,:,:] = im_data[i]/temp
    new_data[4,:,:] = ndvi
    new_data[5,:,:] = ndwi
    input_pic = new_data
    # detect and save result
    ans = cal_box_new(model, input_pic)
    save_path = '../geo_tiff_test/'
    write_img(save_path + date + '_class.tif', im_proj, im_geotrans, ans)
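Note that the download loop in this example calls api.download() without checking availability, so requests for products archived in the Long Term Archive will fail. A minimal sketch of an LTA-aware variant, assuming a sentinelsat release that provides SentinelAPI.trigger_offline_retrieval(); the helper name download_when_online is hypothetical:

from sentinelsat import SentinelAPI

def download_when_online(api, uuid):
    # Download the product if it is online; otherwise ask the Long Term
    # Archive to restore it so that a later retry can succeed.
    info = api.get_product_odata(uuid)
    if info['Online']:
        api.download(uuid)
        return True
    api.trigger_offline_retrieval(uuid)  # assumes a sentinelsat version exposing this call
    return False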
Code example #22
        while n < len(ids):
            # pick an account (cont is set elsewhere and selects the credentials)
            if cont == 0:
                api = SentinelAPI('biancasantana', '988245535', 'https://scihub.copernicus.eu/dhus', show_progressbars=True)
            elif cont == 1:
                api = SentinelAPI('_labgis', '988245535', 'https://scihub.copernicus.eu/dhus', show_progressbars=True)
            else:
                api = SentinelAPI('labgis_', '988245535', 'https://scihub.copernicus.eu/dhus', show_progressbars=True)

            print(n, m)
            # take the next pair of ids (renamed from "list", which shadowed the built-in)
            batch = ids[n:m]

            product_info = api.get_product_odata(batch[0])
            product_info1 = api.get_product_odata(batch[1])
            if product_info['Online']:
                print('Product {} is online. Starting download.'.format(batch[0]))
                api.download(batch[0])
            elif product_info1['Online']:
                print('Product {} is online. Starting download.'.format(batch[1]))
                api.download(batch[1])
            else:
                print('Product {} is not online.'.format(batch[0]))
                print('Product {} is not online.'.format(batch[1]))

            print(batch)
            n += 2
            m += 2
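The fragment above selects an account through cont but never changes it, so a single account ends up serving every pair. A hedged sketch of the apparent intent, rotating credentials per batch with itertools.cycle; the credential pairs are placeholders:

from itertools import cycle
from sentinelsat import SentinelAPI

ACCOUNTS = cycle([('user_a', 'pass_a'), ('user_b', 'pass_b'), ('user_c', 'pass_c')])  # placeholders

def download_in_pairs(ids, url='https://scihub.copernicus.eu/dhus'):
    # Work through the id list two at a time, switching credentials per
    # batch to spread the load across accounts.
    for n in range(0, len(ids), 2):
        user, password = next(ACCOUNTS)
        api = SentinelAPI(user, password, url, show_progressbars=True)
        for uuid in ids[n:n + 2]:
            if api.get_product_odata(uuid)['Online']:
                print('Product {} is online. Starting download.'.format(uuid))
                api.download(uuid)
            else:
                print('Product {} is not online.'.format(uuid))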
Code example #23
File: test_mod.py Project: ibamacsr/sentinelsat
def test_get_product_info_bad_key():
    api = SentinelAPI(**_api_auth)

    with pytest.raises(SentinelAPIError) as excinfo:
        api.get_product_odata('invalid-xyz')
    assert excinfo.value.msg == "InvalidKeyException : Invalid key (invalid-xyz) to access Products"
Code example #24
def download_imagedata(footprint_geojson, startdate, enddate):
    """ download image data with SentinelAPI """

    # .netrc method is not working
    api = SentinelAPI('flooda31', 'floodproject',
                      'https://scihub.copernicus.eu/dhus')
    footprint = geojson_to_wkt(read_geojson(footprint_geojson))
    products = api.query(footprint,
                         date=(startdate, enddate),
                         platformname='Sentinel-2',
                         producttype='S2MSI2A',
                         cloudcoverpercentage=(0, 30))

    product_name = []
    date = startdate

    # download searched data to local
    for product in products:
        product_info = api.get_product_odata(product)
        is_online = product_info['Online']

        if is_online:
            product_name.append(product_info['title'])
            api.download(product)
        else:
            # offline data issue:
            # https://scihub.copernicus.eu/userguide/LongTermArchive
            # https://sentinelsat.readthedocs.io/en/latest/api_overview.html#lta-products
            # may try asf
            print('Product {} is not online.'.format(product))

    product_name.reverse()

    # use gdalinfo + gdal_translate to combine B2, B3, B4, and B8 into one multiband tif file.
    for file_name in product_name:
        print(file_name)
        query = 'gdalinfo ' + file_name + ".zip"
        info = os.popen(query)
        d = info.read()
        d = d.split('SUBDATASET_1_NAME=')
        d = d[1].split('\n')
        q = 'gdal_translate ' + d[0] + ' ' + file_name + '.tif'
        info = os.popen(q)
        d = info.read()

    minX, maxY, maxX, minY = get_extent(product_name[0] + ".tif")
    if len(product_name) > 1:
        for fn in product_name[1:]:
            minx, maxy, maxx, miny = get_extent(fn + ".tif")
            minX = min(minX, minx)
            maxY = max(maxY, maxy)
            maxX = max(maxX, maxx)
            minY = min(minY, miny)

    in_ds = gdal.Open(product_name[0] + ".tif")
    gt = in_ds.GetGeoTransform()
    rows = int(int(maxX - minX) / abs(gt[5]))
    cols = int(int(maxY - minY) / gt[1])

    driver = gdal.GetDriverByName("GTiff")
    out_ds = driver.Create('union.tif', rows, cols, 4, gdal.GDT_Byte)
    out_ds.SetProjection(in_ds.GetProjection())

    gt = list(in_ds.GetGeoTransform())
    gt[0], gt[3] = minX, maxY
    out_ds.SetGeoTransform(gt)

    for fn in product_name:
        in_ds = gdal.Open(fn + ".tif")
        trans = gdal.Transformer(in_ds, out_ds, [])
        success, xyz = trans.TransformPoint(False, 0, 0)
        x, y, z = map(int, xyz)

        for i in range(4):
            data = in_ds.GetRasterBand(i + 1).ReadAsArray()
            out_ds.GetRasterBand(i + 1).WriteArray(data, x, y)
    del in_ds, out_ds

    # cut image
    src_path = 'union.tif'
    src = rio.open(src_path)

    shpdata = gpd.read_file(footprint_geojson)
    out_shpdata = shpdata.copy()
    shpdata = shpdata.to_crs(src.crs)

    features = shpdata.geometry[0].__geo_interface__
    out_image, out_transform = rio.mask.mask(src, [features],
                                             crop=True,
                                             nodata=src.nodata)
    out_meta = src.meta.copy()
    out_meta.update({
        "driver": "GTiff",
        "height": out_image.shape[1],
        "width": out_image.shape[2],
        "transform": out_transform
    })
    cutted_tif = "cutted.tif"
    band_mask = rio.open(cutted_tif, "w", **out_meta)
    band_mask.write(out_image)

    return cutted_tif
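A hypothetical invocation of the function above; the GeoJSON path is a placeholder and the date strings use the compact YYYYMMDD form accepted by SentinelAPI.query():

cutted = download_imagedata('aoi_footprint.geojson', '20200601', '20200630')
print('Masked mosaic written to', cutted)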
Code example #26
class SentinelDownloader(object):
    def __init__(
        self,
        user,
        password,
        api_url="https://apihub.copernicus.eu/apihub",
        cred_req=True,
    ):
        self._apiname = api_url
        self._user = user
        self._password = password
        self._cred_req = cred_req

        # init logger
        root = logging.getLogger()
        root.addHandler(logging.StreamHandler(sys.stderr))
        if self._apiname not in ["USGS_EE", "GCS"]:
            try:
                from sentinelsat import SentinelAPI
            except ImportError as e:
                gs.fatal(
                    _("Module requires sentinelsat library: {}").format(e))
            # connect SciHub via API
            self._api = SentinelAPI(self._user,
                                    self._password,
                                    api_url=self._apiname)
        elif self._apiname == "USGS_EE":
            try:
                import landsatxplore.api
                from landsatxplore.errors import EarthExplorerError
            except ImportError as e:
                gs.fatal(
                    _("Module requires landsatxplore library: {}").format(e))
            api_login = False
            while api_login is False:
                # avoid login conflict in possible parallel execution
                try:
                    self._api = landsatxplore.api.API(self._user,
                                                      self._password)
                    api_login = True
                except EarthExplorerError as e:
                    time.sleep(1)
        self._products_df_sorted = None

    def filter(
        self,
        area,
        area_relation,
        clouds=None,
        producttype=None,
        limit=None,
        query={},
        start=None,
        end=None,
        sortby=[],
        asc=True,
        relativeorbitnumber=None,
    ):
        # Dict to identify platforms from the requested product
        platforms = {
            "SL": "Sentinel-1",
            "GR": "Sentinel-1",
            "OC": "Sentinel-1",
            "S2": "Sentinel-2",
            "S3": "Sentinel-3",
        }
        args = {}
        if clouds:
            args["cloudcoverpercentage"] = (0, int(clouds))
        if relativeorbitnumber:
            args["relativeorbitnumber"] = relativeorbitnumber
            if producttype and producttype.startswith("S2") and int(relativeorbitnumber) > 143:
                gs.warning(_("This relative orbit number is out of range"))
            elif int(relativeorbitnumber) > 175:
                gs.warning(_("This relative orbit number is out of range"))
        if producttype:
            if producttype.startswith("S3"):
                # Using custom product names for Sentinel-3 products that look less cryptic
                split = [0, 2, 4, 5, 8]
                args["producttype"] = "_".join([
                    producttype[i:j] for i, j in zip(split, split[1:] + [None])
                ][1:]).ljust(11, "_")
            else:
                args["producttype"] = producttype
            args["platformname"] = platforms[producttype[0:2]]
        if not start:
            start = "NOW-60DAYS"
        else:
            start = start.replace("-", "")
        if not end:
            end = "NOW"
        else:
            end = end.replace("-", "")
        if query:
            redefined = [
                value for value in args.keys() if value in query.keys()
            ]
            if redefined:
                gs.warning(
                    _("Query overrides already defined options ({})").format(
                        ",".join(redefined)))
            args.update(query)
        gs.verbose(
            _("Query: area={} area_relation={} date=({}, {}) args={}").format(
                area, area_relation, start, end, args))
        if self._cred_req is False:
            # in the main function it is ensured that there is an "identifier" query
            self._products_df_sorted = pandas.DataFrame(
                {"identifier": [query["identifier"]]})
            return

        products = self._api.query(area=area,
                                   area_relation=area_relation,
                                   date=(start, end),
                                   **args)
        products_df = self._api.to_dataframe(products)
        if len(products_df) < 1:
            gs.message(_("No product found"))
            return

        # sort and limit to first sorted product
        if sortby:
            self._products_df_sorted = products_df.sort_values(
                sortby, ascending=[asc] * len(sortby))
        else:
            self._products_df_sorted = products_df

        if limit:
            self._products_df_sorted = self._products_df_sorted.head(
                int(limit))

        gs.message(
            _("{} Sentinel product(s) found").format(
                len(self._products_df_sorted)))

    def list(self):
        if self._products_df_sorted is None:
            return
        id_kw = ("uuid", "entity_id")
        identifier_kw = ("identifier", "display_id")
        cloud_kw = ("cloudcoverpercentage", "cloud_cover")
        time_kw = ("beginposition", "acquisition_date")
        kw_idx = 1 if self._apiname == "USGS_EE" else 0
        for idx in range(len(self._products_df_sorted[id_kw[kw_idx]])):
            if cloud_kw[kw_idx] in self._products_df_sorted:
                ccp = "{0:2.0f}%".format(
                    float(self._products_df_sorted[cloud_kw[kw_idx]][idx]))
            else:
                ccp = "cloudcover_NA"

            print_str = "{0} {1}".format(
                self._products_df_sorted[id_kw[kw_idx]][idx],
                self._products_df_sorted[identifier_kw[kw_idx]][idx],
            )
            if kw_idx == 1:
                time_string = self._products_df_sorted[time_kw[kw_idx]][idx]
            else:
                time_string = self._products_df_sorted[
                    time_kw[kw_idx]][idx].strftime("%Y-%m-%dT%H:%M:%SZ")
            print_str += " {0} {1}".format(time_string, ccp)
            if kw_idx == 0:
                print_str += " {0}".format(
                    self._products_df_sorted["producttype"][idx])
                print_str += " {0}".format(
                    self._products_df_sorted["size"][idx])

            print(print_str)

    def skip_existing(self, output, pattern_file):
        prod_df_type = type(self._products_df_sorted)
        # Check if skipping is possible/required
        if prod_df_type != dict:
            if self._products_df_sorted.empty:
                return
        elif not self._products_df_sorted or not os.path.exists(output):
            return
        # Check if ingestion date is returned by API
        if "ingestiondate" not in self._products_df_sorted:
            gs.warning(
                _("Ingestiondate not returned. Cannot filter previously downloaded scenes"
                  ))
            return
        # Check for previously downloaded scenes
        existing_files = [
            f for f in os.listdir(output)
            if re.search(r"\.zip$|\.safe$|\.ZIP$|\.SAFE$", f)
        ]
        if len(existing_files) <= 1:
            return
        # Filter by ingestion date
        skiprows = []
        for idx, display_id in enumerate(
                self._products_df_sorted["identifier"]):
            existing_file = [
                sfile for sfile in existing_files if display_id in sfile
            ]
            if existing_file:
                creation_time = datetime.fromtimestamp(
                    os.path.getctime(existing_file[0]))
                if self._products_df_sorted["ingestiondate"][
                        idx] <= creation_time:
                    gs.message(
                        _("Skipping scene: {} which is already downloaded.".
                          format(self._products_df_sorted["identifier"][idx])))
                    skiprows.append(display_id)
        if prod_df_type == dict:
            for scene in skiprows:
                idx = self._products_df_sorted["identifier"].index(scene)
                for key in self._products_df_sorted:
                    self._products_df_sorted[key].pop(idx)
        else:
            self._products_df_sorted = self._products_df_sorted[
                ~self._products_df_sorted["identifier"].isin(skiprows)]

    def download(self,
                 output,
                 sleep=False,
                 maxretry=False,
                 datasource="ESA_COAH"):
        if self._products_df_sorted is None:
            return

        create_dir(output)
        gs.message(_("Downloading data into <{}>...").format(output))
        if datasource == "USGS_EE":
            from landsatxplore.earthexplorer import EarthExplorer
            from landsatxplore.errors import EarthExplorerError
            from zipfile import ZipFile

            ee_login = False
            while ee_login is False:
                # avoid login conflict in possible parallel execution
                try:
                    ee = EarthExplorer(self._user, self._password)
                    ee_login = True
                except EarthExplorerError as e:
                    time.sleep(1)
            for idx in range(len(self._products_df_sorted["entity_id"])):
                scene = self._products_df_sorted["entity_id"][idx]
                identifier = self._products_df_sorted["display_id"][idx]
                zip_file = os.path.join(output, "{}.zip".format(identifier))
                gs.message(_("Downloading {}...").format(identifier))
                try:
                    ee.download(identifier=identifier,
                                output_dir=output,
                                timeout=600)
                except EarthExplorerError as e:
                    gs.fatal(_(e))
                ee.logout()
                # extract .zip to get "usual" .SAFE
                with ZipFile(zip_file, "r") as zip:
                    safe_name = zip.namelist()[0].split("/")[0]
                    outpath = os.path.join(output, safe_name)
                    zip.extractall(path=output)
                gs.message(_("Downloaded to <{}>").format(outpath))
                try:
                    os.remove(zip_file)
                except Exception as e:
                    gs.warning(
                        _("Unable to remove {0}:{1}").format(zip_file, e))

        elif datasource == "ESA_COAH":
            for idx in range(len(self._products_df_sorted["uuid"])):
                gs.message("{} -> {}.SAFE".format(
                    self._products_df_sorted["uuid"][idx],
                    os.path.join(output,
                                 self._products_df_sorted["identifier"][idx]),
                ))
                # download
                out = self._api.download(self._products_df_sorted["uuid"][idx],
                                         output)
                if sleep:
                    x = 1
                    online = out["Online"]
                    while not online:
                        # sleep is in minutes so multiply by 60
                        time.sleep(int(sleep) * 60)
                        out = self._api.download(
                            self._products_df_sorted["uuid"][idx], output)
                        x += 1
                        if x > maxretry:
                            online = True
        elif datasource == "GCS":
            for scene_id in self._products_df_sorted["identifier"]:
                gs.message(_("Downloading {}...").format(scene_id))
                dl_code = download_gcs(scene_id, output)
                if dl_code == 0:
                    gs.message(
                        _("Downloaded to {}").format(
                            os.path.join(output, "{}.SAFE".format(scene_id))))
                else:
                    # remove incomplete file
                    del_folder = os.path.join(output,
                                              "{}.SAFE".format(scene_id))
                    try:
                        shutil.rmtree(del_folder)
                    except Exception as e:
                        gs.warning(
                            _("Unable to remove unfinished "
                              "download {}".format(del_folder)))

    def save_footprints(self, map_name):
        if self._products_df_sorted is None:
            return
        if self._apiname == "USGS_EE":
            gs.fatal(
                _("USGS Earth Explorer does not support footprint download."))
        try:
            from osgeo import ogr, osr
        except ImportError as e:
            gs.fatal(
                _("Option <footprints> requires GDAL library: {}").format(e))

        gs.message(_("Writing footprints into <{}>...").format(map_name))
        driver = ogr.GetDriverByName("GPKG")
        tmp_name = gs.tempfile() + ".gpkg"
        data_source = driver.CreateDataSource(tmp_name)

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(4326)

        # features can be polygons or multi-polygons
        layer = data_source.CreateLayer(str(map_name), srs,
                                        ogr.wkbMultiPolygon)

        # attributes
        attrs = OrderedDict([
            ("uuid", ogr.OFTString),
            ("ingestiondate", ogr.OFTString),
            ("cloudcoverpercentage", ogr.OFTInteger),
            ("producttype", ogr.OFTString),
            ("identifier", ogr.OFTString),
        ])

        # Sentinel-1 data does not have cloudcoverpercentage
        prod_types = [type for type in self._products_df_sorted["producttype"]]
        if not any(type in prod_types for type in cloudcover_products):
            del attrs["cloudcoverpercentage"]

        for key in attrs.keys():
            field = ogr.FieldDefn(key, attrs[key])
            layer.CreateField(field)

        # features
        for idx in range(len(self._products_df_sorted["uuid"])):
            wkt = self._products_df_sorted["footprint"][idx]
            feature = ogr.Feature(layer.GetLayerDefn())
            newgeom = ogr.CreateGeometryFromWkt(wkt)
            # convert polygons to multi-polygons
            newgeomtype = ogr.GT_Flatten(newgeom.GetGeometryType())
            if newgeomtype == ogr.wkbPolygon:
                multigeom = ogr.Geometry(ogr.wkbMultiPolygon)
                multigeom.AddGeometryDirectly(newgeom)
                feature.SetGeometry(multigeom)
            else:
                feature.SetGeometry(newgeom)
            for key in attrs.keys():
                if key == "ingestiondate":
                    value = self._products_df_sorted[key][idx].strftime(
                        "%Y-%m-%dT%H:%M:%SZ")
                else:
                    value = self._products_df_sorted[key][idx]
                feature.SetField(key, value)
            layer.CreateFeature(feature)
            feature = None

        data_source = None

        # coordinates of footprints are in WKT -> fp precision issues
        # -> snap
        gs.run_command(
            "v.import",
            input=tmp_name,
            output=map_name,
            layer=map_name,
            snap=1e-10,
            quiet=True,
        )

    def get_products_from_uuid_usgs(self, uuid_list):
        scenes = []
        for uuid in uuid_list:
            metadata = self._api.metadata(uuid, "SENTINEL_2A")
            scenes.append(metadata)
        scenes_df = pandas.DataFrame.from_dict(scenes)
        self._products_df_sorted = scenes_df
        gs.message(
            _("{} Sentinel product(s) found").format(
                len(self._products_df_sorted)))

    def set_uuid(self, uuid_list):
        """Set products by uuid.

        TODO: Find better implementation

        :param uuid: uuid to download
        """
        if self._apiname == "USGS_EE":
            self.get_products_from_uuid_usgs(uuid_list)
        else:
            from sentinelsat.sentinel import SentinelAPIError

            self._products_df_sorted = {"uuid": []}
            for uuid in uuid_list:
                try:
                    odata = self._api.get_product_odata(uuid, full=True)
                except SentinelAPIError as e:
                    gs.error(_("{0}. UUID {1} skipped".format(e, uuid)))
                    continue

                for k, v in odata.items():
                    if k == "id":
                        k = "uuid"
                    elif k == "Sensing start":
                        k = "beginposition"
                    elif k == "Product type":
                        k = "producttype"
                    elif k == "Cloud cover percentage":
                        k = "cloudcoverpercentage"
                    elif k == "Identifier":
                        k = "identifier"
                    elif k == "Ingestion Date":
                        k = "ingestiondate"
                    elif k == "footprint":
                        pass
                    else:
                        continue
                    if k not in self._products_df_sorted:
                        self._products_df_sorted[k] = []
                    self._products_df_sorted[k].append(v)

    def filter_USGS(
        self,
        area,
        area_relation,
        clouds=None,
        producttype=None,
        limit=None,
        query={},
        start=None,
        end=None,
        sortby=[],
        asc=True,
        relativeorbitnumber=None,
    ):
        if area_relation != "Intersects":
            gs.fatal(
                _("USGS Earth Explorer only supports area_relation"
                  " 'Intersects'"))
        if relativeorbitnumber:
            gs.fatal(
                _("USGS Earth Explorer does not support 'relativeorbitnumber'"
                  " option."))
        if producttype and producttype != "S2MSI1C":
            gs.fatal(
                _("USGS Earth Explorer only supports producttype S2MSI1C"))
        if query:
            if not any(
                    key in query
                    for key in ["identifier", "filename", "usgs_identifier"]):
                gs.fatal(
                    _("USGS Earth Explorer only supports query options"
                      " 'filename', 'identifier' or 'usgs_identifier'."))
            if "usgs_identifier" in query:
                # get entityId from usgs identifier and directly save results
                usgs_id = query["usgs_identifier"]
                check_s2l1c_identifier(usgs_id, source="usgs")
                # entity_id = self._api.lookup('SENTINEL_2A', [usgs_id],
                #                              inverse=True)
                entity_id = self._api.get_entity_id([usgs_id], "SENTINEL_2A")
                self.get_products_from_uuid_usgs(entity_id)
                return
            else:
                if "filename" in query:
                    esa_id = query["filename"].replace(".SAFE", "")
                else:
                    esa_id = query["identifier"]
                check_s2l1c_identifier(esa_id, source="esa")
                esa_prod_id = esa_id.split("_")[-1]
                utm_tile = esa_id.split("_")[-2]
                acq_date = esa_id.split("_")[2].split("T")[0]
                acq_date_string = "{0}-{1}-{2}".format(acq_date[:4],
                                                       acq_date[4:6],
                                                       acq_date[6:])
                start_date = end_date = acq_date_string
                # build the USGS style S2-identifier
                if utm_tile.startswith("T"):
                    utm_tile_base = utm_tile[1:]
                else:
                    utm_tile_base = utm_tile
                bbox = get_bbox_from_S2_UTMtile(utm_tile_base)
        else:
            # get coordinate pairs from wkt string
            str_1 = "POLYGON(("
            str_2 = "))"
            coords = area[area.find(str_1) +
                          len(str_1):area.rfind(str_2)].split(",")
            # add one space to first pair for consistency
            coords[0] = " " + coords[0]
            lons = [float(pair.split(" ")[1]) for pair in coords]
            lats = [float(pair.split(" ")[2]) for pair in coords]
            bbox = (min(lons), min(lats), max(lons), max(lats))
            start_date = start
            end_date = end
        usgs_args = {
            "dataset": "SENTINEL_2A",
            "bbox": bbox,
            "start_date": start_date,
            "end_date": end_date,
        }
        if clouds:
            usgs_args["max_cloud_cover"] = clouds
        if limit:
            usgs_args["max_results"] = limit
        scenes = self._api.search(**usgs_args)
        self._api.logout()
        if query:
            # check if the UTM tile is correct, remove otherwise
            # (iterate over a copy: removing items while iterating skips elements)
            for scene in list(scenes):
                if scene["display_id"].split("_")[1] != utm_tile:
                    scenes.remove(scene)
            # remove the redundant scene
            if len(scenes) == 2:
                for scene in list(scenes):
                    prod_id = scene["display_id"].split("_")[-1]
                    if prod_id != esa_prod_id:
                        scenes.remove(scene)
        if len(scenes) < 1:
            gs.message(_("No product found"))
            return
        scenes_df = pandas.DataFrame.from_dict(scenes)
        if sortby:
            # replace sortby keywords with USGS keywords
            for idx, keyword in enumerate(sortby):
                if keyword == "cloudcoverpercentage":
                    sortby[idx] = "cloud_cover"
                    # turn cloudcover to float to make it sortable
                    scenes_df["cloud_cover"] = pandas.to_numeric(
                        scenes_df["cloud_cover"])
                elif keyword == "ingestiondate":
                    sortby[idx] = "acquisition_date"
                # sorting by footprint is not meaningful here; fall back to display_id
                elif keyword == "footprint":
                    sortby[idx] = "display_id"
            self._products_df_sorted = scenes_df.sort_values(sortby,
                                                             ascending=[asc] *
                                                             len(sortby),
                                                             ignore_index=True)
        else:
            self._products_df_sorted = scenes_df
        gs.message(
            _("{} Sentinel product(s) found").format(
                len(self._products_df_sorted)))
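A minimal usage sketch for the class above when querying SciHub; the credentials, WKT polygon, and output directory are placeholders, and filter() defaults to the last 60 days when no dates are passed:

area = 'POLYGON((10.0 50.0, 10.5 50.0, 10.5 50.5, 10.0 50.5, 10.0 50.0))'  # placeholder AOI
downloader = SentinelDownloader('user', 'password')
downloader.filter(area=area, area_relation='Intersects',
                  producttype='S2MSI1C', clouds=30,
                  sortby=['cloudcoverpercentage'], limit=5)
downloader.list()
downloader.download('/tmp/sentinel', sleep=5, maxretry=3)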
Code example #27
def satdownload(product_id, geojson, download_path='./downloads/',
                remove_trash=False, api=None, download_only=False):
    """
    Downloads, extracts and crops products.
    Args:
        product_id: str
            Example: "e3fea737-a83b-4fec-8a5a-68ed8d647c71"
        geojson: str
            Path to geojson file.
        download_path: str, optional
            location to download products.
        remove_trash: bool, default False
            remove unnecessary files after downloading.
        download_only: bool, default False
            Download only (Do not extract).
        api: SentinelAPI api object
    """

    print('Satdownload for ' + product_id)
    logging.debug('satdownload: ' + product_id)
    # create downloads folder
    if os.path.isdir(download_path) is False:
        os.mkdir(download_path)

    if api is None:
        api = SentinelAPI(USERNAME, PASSWORD,
                          'https://scihub.copernicus.eu/dhus')

    # query product information
    product_info = api.get_product_odata(product_id, full=True)

    sentinel = product_info['Satellite']

    # directory for images only
    target_directory = os.path.join(download_path, product_info['title'])

    if os.path.isdir(target_directory):
        print('Product is already processed, skipping product...')
        return

    # download
    if os.path.isfile(os.path.join(
            download_path, product_info['title'] + '.zip')) is True:
        print(product_info['title'] + '.zip' + ' exists.')
    else:
        satdownload_zip(product_info['id'], download_path, api=api)
    # skip extraction part
    if download_only is True:
        return

    # extract zip file
    zipfile_path = os.path.join(download_path, product_info['title'] + '.zip')
    zip_ref = zipfile.ZipFile(zipfile_path, 'r')
    zip_ref.extractall(download_path)
    zip_ref.close()

    if os.path.isdir(
            os.path.join(download_path, product_info['Filename'])) is False:
        raise Exception('Directory not found after unzipping.')

    # clearing target directory
    if os.path.isdir(target_directory) is True:
        shutil.rmtree(target_directory)
    os.mkdir(target_directory)

    selection = transform_coordinates(coordinates_from_geojson(geojson))

    if sentinel == 'Sentinel-2':
        # a product can contain several tiles (located in ./GRANULE/)
        granule = os.path.join(download_path, product_info['Filename'],
                               'GRANULE')
        for i, tail_name in enumerate(os.listdir(granule)):
            print('\ttail name: ' + tail_name)
            tail_folder_name = 'tail.{}'.format(i)
            os.mkdir(os.path.join(target_directory, tail_folder_name))

            # image directories are different for different product types
            image_dir = os.path.join(granule, tail_name, 'IMG_DATA')
            if product_info['Product type'] == 'S2MSI2Ap':
                image_dir = os.path.join(image_dir, 'R10m')

            # move bands into target directory
            for image in os.listdir(image_dir):
                image_prime = image
                if product_info['Product type'] == 'S2MSI2Ap':
                    image_prime = image_prime[4:-8] + '.jp2'
                os.rename(os.path.join(image_dir, image),
                          os.path.join(target_directory,
                                       tail_folder_name, image_prime))

    elif sentinel == 'Sentinel-1':
        # shift selection for sentinel-1 products
        dx, dy = 130.54544882194287, 20.162166196209284
        selection[:, 0] = selection[:, 0] + dx
        selection[:, 1] = selection[:, 1] - dy

        # create tail folder
        tail_folder_name = 'tail.{}'.format(0)
        os.mkdir(os.path.join(target_directory, tail_folder_name))

        # image directories are different for different product types
        image_dir = os.path.join(download_path, product_info['Filename'],
                                 'measurement')

        # move bands into target directory
        for image in os.listdir(image_dir):
            image_path = os.path.join(image_dir, image)
            gdal.Warp(image_path, gdal.Open(image_path), dstSRS='EPSG:32638')
            os.rename(image_path, os.path.join(target_directory,
                      tail_folder_name, image))
    else:
        print('Unknown satellite')

    # save info file
    product_info_series = pandas.Series(product_info)
    with open(os.path.join(target_directory, 'info.txt'), 'w') as f:
        f.write(product_info_series.to_string())
    with open(os.path.join(target_directory, 'info.json'), 'w') as f:
        product_info_series.to_json(f)

    # remove unnecessary files
    if remove_trash is True:
        os.remove(zipfile_path)
        shutil.rmtree(os.path.join(download_path, product_info['Filename']))

    # cropping images
    print(target_directory)
    for tail_name in os.listdir(target_directory):
        if os.path.isdir(os.path.join(target_directory, tail_name)) is False:
            continue
        print('\tprocessing ' + tail_name + ' ...')
        process_tail(os.path.join(target_directory, tail_name), selection,
                     remove_trash=remove_trash)
    print('\n\n')
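A hypothetical call to the function above, reusing the UUID from its docstring; passing a shared api object avoids re-authenticating for every product:

api = SentinelAPI(USERNAME, PASSWORD, 'https://scihub.copernicus.eu/dhus')
satdownload('e3fea737-a83b-4fec-8a5a-68ed8d647c71', 'selection.geojson',
            download_path='./downloads/', remove_trash=True, api=api)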
Code example #28
File: api_sentinel_2.py Project: 2AiBAIT/SatFire2
def apirequest(year="2017"):
    full_path = os.getcwd()
    os.chdir(Path(full_path).parents[0])
    downloadfolder = r"Produtos_Sentinel"
    try:
        if not os.path.exists(downloadfolder):
            os.makedirs(downloadfolder)
    except OSError:
        print("Erro: Criar Pasta " + downloadfolder)

    api = SentinelAPI('pfmacf', '4mcTUobqdf',
                      'https://scihub.copernicus.eu/dhus')
    path = "Dataset" + str(year)
    my_dir_path = Path(path)
    if not my_dir_path.is_dir():
        switcherfunc = {
            0: convertshapeepsg,
            1: createindividualshp,
            2: datatxt,
            3: getgeojson
        }
        for func in switcherfunc:
            _func_exec = switcherfunc[func](year)

    num_sub_folders = [name for name in os.listdir(path)]
    json_folder = check_jsonstats_folder("JsonApis")
    index_filename = "LastIndex_SentinelApi" + " " + str(year) + ".json"
    jpath = os.path.join(json_folder, index_filename)
    jsonstring = "Last Iteration" + " " + str(year)
    unique_txt = r"Unique_Products.txt"
    open(unique_txt, 'a').close()
    contador_vazios = 0
    my_file_index = Path(jpath)

    if my_file_index.is_file():

        with open(jpath, "r") as file:
            data = file.read()

        file_dict = json.loads(data)

        index = file_dict[jsonstring]

    else:
        index = 0

    for i in range(index, len(num_sub_folders)):

        pathfinal = os.path.join(path, str(i))
        pathtxt = os.path.join(pathfinal, "Data.txt")
        pathgeojson = os.path.join(pathfinal, "bbox4326.geojson")

        data = linecache.getline(pathtxt, 2)
        date_fire = data.rstrip('\n').split(" ")

        ano = int(date_fire[2].split("-")[0])
        mes = int(date_fire[2].split("-")[1])
        dia = int(date_fire[2].split("-")[2])
        search_date = [ano, mes, dia]

        products = get_footprint(api, pathgeojson, search_date, 14)
        print('\nNumero de produtos encontrados = ', (len(products)))
        print('\nIndex = ', i)
        if not len(products):
            products = get_footprint(api, pathgeojson, search_date, 30)
            if not len(products):
                products = get_footprint(api, pathgeojson, search_date, 60)
                if not len(products):
                    filename = "Sem_Produtos.txt"
                    filepath = os.path.join(pathfinal, filename)
                    write_to_file(filepath, mode="w", text="Invalido")
                    contador_vazios += 1
                    continue

        # convert to DataFrame and GeoJSON
        products_df = api.to_dataframe(products)
        products_geojson = api.to_geojson(products)
        valid_list = validate_footprints(products_geojson, pathfinal, i, year)

        if valid_list:

            product_id, product_title = get_final_product(
                valid_list, products_df, pathfinal)
            product_info = api.get_product_odata(product_id)
            file = open(unique_txt, "r+", encoding="utf-8")

            line_found = any(product_title in line for line in file)
            if not line_found:
                write_to_file(unique_txt, mode="a", text=product_title)
                open("Products_to_Download.txt", 'a').close()
                check_availability_download(product_info, product_title,
                                            product_id, api, downloadfolder)
                file.close()
            else:
                file.close()

        else:
            filename = "Sem_Produtos.txt"
            filepath = os.path.join(pathfinal, filename)
            write_to_file(filepath, mode="w", text="Invalido")
            contador_vazios += 1

        json_dict = {jsonstring: i}
        json_dict_exception = {jsonstring: 0}

        if i < len(num_sub_folders) - 1:
            with open(jpath, 'w') as output:
                json.dump(json_dict, output, indent=4)
        else:
            with open(jpath, 'w') as output:
                json.dump(json_dict_exception, output, indent=4)

    print("Contagem de incendios sem Produto: ", contador_vazios)
Code example #29
def test_get_product_odata_full():
    api = SentinelAPI(**_api_auth)

    expected_full = {
        '8df46c9e-a20c-43db-a19a-4240c2ed3b8b': {
            'id': '8df46c9e-a20c-43db-a19a-4240c2ed3b8b',
            'title':
            'S1A_EW_GRDM_1SDV_20151121T100356_20151121T100429_008701_00C622_A0EC',
            'size': 143549851,
            'md5': 'D5E4DF5C38C6E97BF7E7BD540AB21C05',
            'date': datetime(2015, 11, 21, 10, 3, 56, 675000),
            'footprint':
            'POLYGON((-63.852531 -5.880887,-67.495872 -5.075419,-67.066071 -3.084356,-63.430576 -3.880541,-63.852531 -5.880887))',
            'url':
            "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/$value",
            'Acquisition Type': 'NOMINAL',
            'Carrier rocket': 'Soyuz',
            'Cycle number': 64,
            'Date': datetime(2015, 11, 21, 10, 3, 56, 675000),
            'Filename':
            'S1A_EW_GRDM_1SDV_20151121T100356_20151121T100429_008701_00C622_A0EC.SAFE',
            'Footprint':
            '<gml:Polygon srsName="http://www.opengis.net/gml/srs/epsg.xml#4326" xmlns:gml="http://www.opengis.net/gml">   <gml:outerBoundaryIs>      <gml:LinearRing>         <gml:coordinates>-5.880887,-63.852531 -5.075419,-67.495872 -3.084356,-67.066071 -3.880541,-63.430576 -5.880887,-63.852531</gml:coordinates>      </gml:LinearRing>   </gml:outerBoundaryIs></gml:Polygon>',
            'Format': 'SAFE',
            'Identifier':
            'S1A_EW_GRDM_1SDV_20151121T100356_20151121T100429_008701_00C622_A0EC',
            'Ingestion Date': datetime(2015, 11, 21, 13, 22, 4, 992000),
            'Instrument': 'SAR-C',
            'Instrument abbreviation': 'SAR-C SAR',
            'Instrument description':
            '<a target="_blank" href="https://sentinel.esa.int/web/sentinel/missions/sentinel-1">https://sentinel.esa.int/web/sentinel/missions/sentinel-1</a>',
            'Instrument description text':
            'The SAR Antenna Subsystem (SAS) is developed and build by AstriumGmbH. It is a large foldable planar phased array antenna, which isformed by a centre panel and two antenna side wings. In deployedconfiguration the antenna has an overall aperture of 12.3 x 0.84 m.The antenna provides a fast electronic scanning capability inazimuth and elevation and is based on low loss and highly stablewaveguide radiators build in carbon fibre technology, which arealready successfully used by the TerraSAR-X radar imaging mission.The SAR Electronic Subsystem (SES) is developed and build byAstrium Ltd. It provides all radar control, IF/ RF signalgeneration and receive data handling functions for the SARInstrument. The fully redundant SES is based on a channelisedarchitecture with one transmit and two receive chains, providing amodular approach to the generation and reception of wide-bandsignals and the handling of multi-polarisation modes. One keyfeature is the implementation of the Flexible Dynamic BlockAdaptive Quantisation (FD-BAQ) data compression concept, whichallows an efficient use of on-board storage resources and minimisesdownlink times.',
            'Instrument mode': 'EW',
            'Instrument name': 'Synthetic Aperture Radar (C-band)',
            'Instrument swath': 'EW',
            'JTS footprint':
            'POLYGON ((-63.852531 -5.880887,-67.495872 -5.075419,-67.066071 -3.084356,-63.430576 -3.880541,-63.852531 -5.880887))',
            'Launch date': 'April 3rd, 2014',
            'Mission datatake id': 50722,
            'Mission type': 'Earth observation',
            'Mode': 'EW',
            'NSSDC identifier': '0000-000A',
            'Operator': 'European Space Agency',
            'Orbit number (start)': 8701,
            'Orbit number (stop)': 8701,
            'Pass direction': 'DESCENDING',
            'Phase identifier': 1,
            'Polarisation': 'VV VH',
            'Product class': 'S',
            'Product class description': 'SAR Standard L1 Product',
            'Product composition': 'Slice',
            'Product level': 'L1',
            'Product type': 'GRD',
            'Relative orbit (start)': 54,
            'Relative orbit (stop)': 54,
            'Resolution': 'Medium',
            'Satellite': 'Sentinel-1',
            'Satellite description':
            '<a target="_blank" href="https://sentinel.esa.int/web/sentinel/missions/sentinel-1">https://sentinel.esa.int/web/sentinel/missions/sentinel-1</a>',
            'Satellite name': 'Sentinel-1',
            'Satellite number': 'A',
            'Sensing start': datetime(2015, 11, 21, 10, 3, 56, 675000),
            'Sensing stop': datetime(2015, 11, 21, 10, 4, 29, 714000),
            'Size': '223.88 MB',
            'Slice number': 1,
            'Start relative orbit number': 54,
            'Status': 'ARCHIVED',
            'Stop relative orbit number': 54,
            'Timeliness Category': 'Fast-24h'
        },
        '44517f66-9845-4792-a988-b5ae6e81fd3e': {
            'id': '44517f66-9845-4792-a988-b5ae6e81fd3e',
            'title':
            'S2A_OPER_PRD_MSIL1C_PDMC_20151228T112523_R110_V20151227T142229_20151227T142229',
            'size': 5854429622,
            'md5': '48C5648C2644CE07207B3C943DEDEB44',
            'date': datetime(2015, 12, 27, 14, 22, 29),
            'footprint':
            'POLYGON((-58.80274769505742 -4.565257232533263,-58.80535376268811 -5.513960396525286,-57.90315169909761 -5.515947033626909,-57.903151791669515 -5.516014389089381,-57.85874693129081 -5.516044812342758,-57.814323596961835 -5.516142631941845,-57.81432351345917 -5.516075248310466,-57.00018056571297 -5.516633044843839,-57.000180565731384 -5.516700066819259,-56.95603179187787 -5.51666329264377,-56.91188395837315 -5.516693539799448,-56.91188396736038 -5.51662651925904,-56.097209386295305 -5.515947927683427,-56.09720929423562 -5.516014937246069,-56.053056977999596 -5.5159111504805916,-56.00892491028779 -5.515874390220655,-56.00892501130261 -5.515807411549814,-55.10621586418906 -5.513685455771881,-55.108821882251775 -4.6092845892233,-54.20840287327946 -4.606372862374043,-54.21169990975238 -3.658594390979672,-54.214267703869346 -2.710949551849636,-55.15704255065496 -2.7127451087194463,-56.0563616875051 -2.71378646425769,-56.9561852630143 -2.7141556791285275,-57.8999998009875 -2.713837142510183,-57.90079161941062 -3.6180222056692726,-58.800616247288836 -3.616721351843382,-58.80274769505742 -4.565257232533263))',
            'url':
            "https://scihub.copernicus.eu/apihub/odata/v1/Products('44517f66-9845-4792-a988-b5ae6e81fd3e')/$value",
            'Cloud cover percentage': 18.153846153846153,
            'Date': datetime(2015, 12, 27, 14, 22, 29),
            'Degraded MSI data percentage': 0,
            'Degraded ancillary data percentage': 0,
            'Filename':
            'S2A_OPER_PRD_MSIL1C_PDMC_20151228T112523_R110_V20151227T142229_20151227T142229.SAFE',
            'Footprint':
            '<gml:Polygon srsName="http://www.opengis.net/gml/srs/epsg.xml#4326" xmlns:gml="http://www.opengis.net/gml">   <gml:outerBoundaryIs>      <gml:LinearRing>         <gml:coordinates>-4.565257232533263,-58.80274769505742 -5.513960396525286,-58.80535376268811 -5.515947033626909,-57.90315169909761 -5.516014389089381,-57.903151791669515 -5.516044812342758,-57.85874693129081 -5.516142631941845,-57.814323596961835 -5.516075248310466,-57.81432351345917 -5.516633044843839,-57.00018056571297 -5.516700066819259,-57.000180565731384 -5.51666329264377,-56.95603179187787 -5.516693539799448,-56.91188395837315 -5.51662651925904,-56.91188396736038 -5.515947927683427,-56.097209386295305 -5.516014937246069,-56.09720929423562 -5.5159111504805916,-56.053056977999596 -5.515874390220655,-56.00892491028779 -5.515807411549814,-56.00892501130261 -5.513685455771881,-55.10621586418906 -4.6092845892233,-55.108821882251775 -4.606372862374043,-54.20840287327946 -3.658594390979672,-54.21169990975238 -2.710949551849636,-54.214267703869346 -2.7127451087194463,-55.15704255065496 -2.71378646425769,-56.0563616875051 -2.7141556791285275,-56.9561852630143 -2.713837142510183,-57.8999998009875 -3.6180222056692726,-57.90079161941062 -3.616721351843382,-58.800616247288836 -4.565257232533263,-58.80274769505742</gml:coordinates>      </gml:LinearRing>   </gml:outerBoundaryIs></gml:Polygon>',
            'Format': 'SAFE',
            'Format correctness': 'PASSED',
            'General quality': 'PASSED',
            'Generation time': datetime(2015, 12, 28, 11, 25, 23, 357),
            'Geometric quality': 'PASSED',
            'Identifier':
            'S2A_OPER_PRD_MSIL1C_PDMC_20151228T112523_R110_V20151227T142229_20151227T142229',
            'Ingestion Date': datetime(2015, 12, 28, 10, 57, 13, 725000),
            'Instrument': 'MSI',
            'Instrument abbreviation': 'MSI',
            'Instrument mode': 'INS-NOBS',
            'Instrument name': 'Multi-Spectral Instrument',
            'JTS footprint':
            'POLYGON ((-58.80274769505742 -4.565257232533263,-58.80535376268811 -5.513960396525286,-57.90315169909761 -5.515947033626909,-57.903151791669515 -5.516014389089381,-57.85874693129081 -5.516044812342758,-57.814323596961835 -5.516142631941845,-57.81432351345917 -5.516075248310466,-57.00018056571297 -5.516633044843839,-57.000180565731384 -5.516700066819259,-56.95603179187787 -5.51666329264377,-56.91188395837315 -5.516693539799448,-56.91188396736038 -5.51662651925904,-56.097209386295305 -5.515947927683427,-56.09720929423562 -5.516014937246069,-56.053056977999596 -5.5159111504805916,-56.00892491028779 -5.515874390220655,-56.00892501130261 -5.515807411549814,-55.10621586418906 -5.513685455771881,-55.108821882251775 -4.6092845892233,-54.20840287327946 -4.606372862374043,-54.21169990975238 -3.658594390979672,-54.214267703869346 -2.710949551849636,-55.15704255065496 -2.7127451087194463,-56.0563616875051 -2.71378646425769,-56.9561852630143 -2.7141556791285275,-57.8999998009875 -2.713837142510183,-57.90079161941062 -3.6180222056692726,-58.800616247288836 -3.616721351843382,-58.80274769505742 -4.565257232533263))',
            'Mission datatake id': 'GS2A_20151227T140932_002681_N02.01',
            'NSSDC identifier': '2015-000A',
            'Orbit number (start)': 2681,
            'Pass direction': 'DESCENDING',
            'Platform serial identifier': 'Sentinel-2A',
            'Processing baseline': 2.01,
            'Processing level': 'Level-1C',
            'Product type': 'S2MSI1C',
            'Radiometric quality': 'PASSED',
            'Relative orbit (start)': 110,
            'Satellite': 'Sentinel-2',
            'Satellite name': 'Sentinel-2',
            'Satellite number': 'A',
            'Sensing start': datetime(2015, 12, 27, 14, 22, 29),
            'Sensing stop': datetime(2015, 12, 27, 14, 22, 29),
            'Sensor quality': 'PASSED',
            'Size': '5.50 GB'
        }
    }
    for id, expected in expected_full.items():
        ret = api.get_product_odata(id, full=True)
        assert set(ret) == set(expected)
        for k in ret:
            assert ret[k] == expected[k]
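For comparison, a hedged sketch of the same check without full=True; the default response carries the short attribute set that also forms the leading keys of each expected dict above:

def test_get_product_odata_brief():
    api = SentinelAPI(**_api_auth)
    ret = api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
    # The brief (default) response includes at least these core keys.
    assert {'id', 'title', 'size', 'md5', 'date', 'footprint', 'url'} <= set(ret)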
Code example #30
for line in out.splitlines():
    # Product bb7a7783-f91b-4a35-907d-a6ddb807da73 - Date: 2019-07-09T02:55:59.024Z, Instrument: MSI, Mode: , Satellite: Sentinel-2, Size: 1.09 GB
    m = re.search(r'Product\s+(\S+)', line)
    if not m:
        continue
    uuids.append(m.group(1))

names = []
sizes = []
stats = []
if opts.url is not None:
    api = SentinelAPI(opts.user, opts.password, opts.url)
else:
    api = SentinelAPI(opts.user, opts.password)
for i, uuid in enumerate(uuids):
    out = api.get_product_odata(uuid)
    name = out['title']
    size = out['size']
    stat = out['Online']
    names.append(name)
    sizes.append(size)
    stats.append(stat)
    sys.stderr.write('{:4d} {:40s} {:70s} {:10d} {:7s}\n'.format(
        i + 1, uuid, name, size, 'Online' if stat else 'Offline'))
api.session.close()  # does this have any effect?

if opts.download:
    path = '.' if opts.path is None else opts.path
    for i in range(len(uuids)):
        # Check data availability
        if opts.sort_year:
Code example #31
class SentinelDownloader(object):
    def __init__(self, user, password, api_url='https://scihub.copernicus.eu/apihub'):
        self._apiname = api_url
        self._user = user
        self._password = password

        # init logger
        root = logging.getLogger()
        root.addHandler(logging.StreamHandler(
            sys.stderr
        ))
        if self._apiname == 'https://scihub.copernicus.eu/apihub':
            try:
                from sentinelsat import SentinelAPI
            except ImportError as e:
                gs.fatal(_("Module requires sentinelsat library: {}").format(e))
            # connect SciHub via API
            self._api = SentinelAPI(self._user, self._password,
                                    api_url=self._apiname
                                    )
        elif self._apiname == 'USGS_EE':
            try:
                import landsatxplore.api
                from landsatxplore.errors import EarthExplorerError
            except ImportError as e:
                gs.fatal(_("Module requires landsatxplore library: {}").format(e))
            api_login = False
            while api_login is False:
                # avoid login conflict in possible parallel execution
                try:
                    self._api = landsatxplore.api.API(self._user,
                                                      self._password)
                    api_login = True
                except EarthExplorerError as e:
                    time.sleep(1)
        self._products_df_sorted = None

    def filter(self, area, area_relation,
               clouds=None, producttype=None, limit=None, query={},
               start=None, end=None, sortby=[], asc=True, relativeorbitnumber=None):
        args = {}
        if clouds:
            args['cloudcoverpercentage'] = (0, int(clouds))
        if relativeorbitnumber:
            args['relativeorbitnumber'] = relativeorbitnumber
            if producttype and producttype.startswith('S2') and int(relativeorbitnumber) > 143:
                gs.warning(_("This relative orbit number is out of range"))
            elif int(relativeorbitnumber) > 175:
                gs.warning(_("This relative orbit number is out of range"))
        if producttype:
            args['producttype'] = producttype
            if producttype.startswith('S2'):
                args['platformname'] = 'Sentinel-2'
            else:
                args['platformname'] = 'Sentinel-1'
        if not start:
            start = 'NOW-60DAYS'
        else:
            start = start.replace('-', '')
        if not end:
            end = 'NOW'
        else:
            end = end.replace('-', '')
        if query:
            redefined = [value for value in args.keys() if value in query.keys()]
            if redefined:
                gs.warning(_("Query overrides already defined options ({})").format(
                    ','.join(redefined)
                ))
            args.update(query)
        gs.verbose(_("Query: area={} area_relation={} date=({}, {}) args={}").format(
            area, area_relation, start, end, args
        ))
        products = self._api.query(
            area=area, area_relation=area_relation,
            date=(start, end),
            **args
        )
        products_df = self._api.to_dataframe(products)
        if len(products_df) < 1:
            gs.message(_("No product found"))
            return

        # sort and limit to first sorted product
        if sortby:
            self._products_df_sorted = products_df.sort_values(
                sortby,
                ascending=[asc] * len(sortby)
            )
        else:
            self._products_df_sorted = products_df

        if limit:
            self._products_df_sorted = self._products_df_sorted.head(int(limit))

        gs.message(_("{} Sentinel product(s) found").format(len(self._products_df_sorted)))

    def list(self):
        if self._products_df_sorted is None:
            return
        id_kw = ('uuid', 'entity_id')
        identifier_kw = ('identifier', 'display_id')
        cloud_kw = ('cloudcoverpercentage', 'cloud_cover')
        time_kw = ('beginposition', 'acquisition_date')
        kw_idx = 1 if self._apiname == 'USGS_EE' else 0
        for idx in range(len(self._products_df_sorted[id_kw[kw_idx]])):
            if cloud_kw[kw_idx] in self._products_df_sorted:
                ccp = '{0:2.0f}%'.format(
                    float(self._products_df_sorted[cloud_kw[kw_idx]][idx]))
            else:
                ccp = 'cloudcover_NA'

            print_str = '{0} {1}'.format(
                self._products_df_sorted[id_kw[kw_idx]][idx],
                self._products_df_sorted[identifier_kw[kw_idx]][idx])
            if kw_idx == 1:
                time_string = self._products_df_sorted[time_kw[kw_idx]][idx]
            else:
                time_string = self._products_df_sorted[
                    time_kw[kw_idx]][idx].strftime("%Y-%m-%dT%H:%M:%SZ")
            print_str += ' {0} {1}'.format(time_string, ccp)
            if kw_idx == 0:
                print_str += ' {0}'.format(
                    self._products_df_sorted['producttype'][idx])

            print(print_str)

    def download(self, output, sleep=False, maxretry=False,
                 datasource='ESA_COAH'):
        if self._products_df_sorted is None:
            return

        create_dir(output)
        gs.message(_("Downloading data into <{}>...").format(output))
        if datasource == 'USGS_EE':
            from landsatxplore.earthexplorer import EarthExplorer
            from landsatxplore.errors import EarthExplorerError
            from zipfile import ZipFile
            ee_login = False
            while ee_login is False:
                # avoid login conflict in possible parallel execution
                try:
                    ee = EarthExplorer(self._user, self._password)
                    ee_login = True
                except EarthExplorerError as e:
                    time.sleep(1)
            for idx in range(len(self._products_df_sorted['entity_id'])):
                scene = self._products_df_sorted['entity_id'][idx]
                identifier = self._products_df_sorted['display_id'][idx]
                zip_file = os.path.join(output, '{}.zip'.format(identifier))
                gs.message(_("Downloading {}...").format(identifier))
                try:
                    ee.download(identifier=identifier, output_dir=output, timeout=600)
                except EarthExplorerError as e:
                    gs.fatal(_(e))
                ee.logout()
                # extract .zip to get "usual" .SAFE
                with ZipFile(zip_file, 'r') as zip:
                    safe_name = zip.namelist()[0].split('/')[0]
                    outpath = os.path.join(output, safe_name)
                    zip.extractall(path=output)
                gs.message(_("Downloaded to <{}>").format(outpath))
                try:
                    os.remove(zip_file)
                except Exception as e:
                    gs.warning(_("Unable to remove {0}: {1}").format(
                        zip_file, e))
            # log out only once, after all requested scenes are downloaded
            ee.logout()

        elif datasource == "ESA_COAH":
            for idx in range(len(self._products_df_sorted['uuid'])):
                gs.message('{} -> {}.SAFE'.format(
                    self._products_df_sorted['uuid'][idx],
                    os.path.join(output, self._products_df_sorted['identifier'][idx])
                ))
                # download
                out = self._api.download(self._products_df_sorted['uuid'][idx],
                                         output)
                if sleep:
                    x = 1
                    online = out['Online']
                    while not online:
                        # products in the Long Term Archive are offline;
                        # sleep is given in minutes, so convert to seconds
                        time.sleep(int(sleep) * 60)
                        out = self._api.download(self._products_df_sorted['uuid'][idx],
                                                 output)
                        # refresh the online status after each retry
                        online = out['Online']
                        x += 1
                        if x > maxretry:
                            break
        elif datasource == 'GCS':
            for scene_id in self._products_df_sorted['identifier']:
                gs.message(_("Downloading {}...").format(scene_id))
                dl_code = download_gcs(scene_id, output)
                if dl_code == 0:
                    gs.message(_("Downloaded to {}").format(
                        os.path.join(output, '{}.SAFE'.format(scene_id))))
                else:
                    # remove incomplete file
                    del_folder = os.path.join(output,
                                              '{}.SAFE'.format(scene_id))
                    try:
                        shutil.rmtree(del_folder)
                    except Exception as e:
                        gs.warning(_("Unable to remove unfinished "
                                     "download {0}: {1}").format(del_folder, e))

    def save_footprints(self, map_name):
        if self._products_df_sorted is None:
            return
        if self._apiname == 'USGS_EE':
            gs.fatal(_(
                "USGS Earth Explorer does not support footprint download."))
        try:
            from osgeo import ogr, osr
        except ImportError as e:
            gs.fatal(_("Option <footprints> requires GDAL library: {}").format(e))

        gs.message(_("Writing footprints into <{}>...").format(map_name))
        driver = ogr.GetDriverByName("GPKG")
        tmp_name = gs.tempfile() + '.gpkg'
        data_source = driver.CreateDataSource(tmp_name)

        srs = osr.SpatialReference()
        srs.ImportFromEPSG(4326)

        # features can be polygons or multi-polygons
        layer = data_source.CreateLayer(str(map_name), srs, ogr.wkbMultiPolygon)

        # attributes
        attrs = OrderedDict([
            ("uuid", ogr.OFTString),
            ("ingestiondate", ogr.OFTString),
            ("cloudcoverpercentage", ogr.OFTInteger),
            ("producttype", ogr.OFTString),
            ("identifier", ogr.OFTString)
        ])

        # Sentinel-1 data does not have cloudcoverpercentage
        prod_types = list(self._products_df_sorted["producttype"])
        s1_types = ["SLC", "GRD"]
        if any(s1_type in prod_types for s1_type in s1_types):
            del attrs["cloudcoverpercentage"]

        for key in attrs.keys():
            field = ogr.FieldDefn(key, attrs[key])
            layer.CreateField(field)

        # features
        for idx in range(len(self._products_df_sorted['uuid'])):
            wkt = self._products_df_sorted['footprint'][idx]
            feature = ogr.Feature(layer.GetLayerDefn())
            newgeom = ogr.CreateGeometryFromWkt(wkt)
            # convert polygons to multi-polygons
            newgeomtype = ogr.GT_Flatten(newgeom.GetGeometryType())
            if newgeomtype == ogr.wkbPolygon:
                multigeom = ogr.Geometry(ogr.wkbMultiPolygon)
                multigeom.AddGeometryDirectly(newgeom)
                feature.SetGeometry(multigeom)
            else:
                feature.SetGeometry(newgeom)
            for key in attrs.keys():
                if key == 'ingestiondate':
                    value = self._products_df_sorted[key][idx].strftime("%Y-%m-%dT%H:%M:%SZ")
                else:
                    value = self._products_df_sorted[key][idx]
                feature.SetField(key, value)
            layer.CreateFeature(feature)
            feature = None

        data_source = None

        # footprint coordinates come from WKT strings and may suffer from
        # floating-point precision issues, hence snap on import
        gs.run_command('v.import', input=tmp_name, output=map_name,
                       layer=map_name, snap=1e-10, quiet=True
                       )

    def get_products_from_uuid_usgs(self, uuid_list):
        scenes = []
        for uuid in uuid_list:
            metadata = self._api.metadata(uuid, 'SENTINEL_2A')
            scenes.append(metadata)
        scenes_df = pandas.DataFrame.from_dict(scenes)
        self._products_df_sorted = scenes_df
        gs.message(_("{} Sentinel product(s) found").format(
            len(self._products_df_sorted)))

    def set_uuid(self, uuid_list):
        """Set products by uuid.

        TODO: Find better implementation

        :param uuid: uuid to download
        """
        if self._apiname == 'USGS_EE':
            self.get_products_from_uuid_usgs(uuid_list)
        else:
            from sentinelsat.sentinel import SentinelAPIError

            self._products_df_sorted = {'uuid': []}
            for uuid in uuid_list:
                try:
                    odata = self._api.get_product_odata(uuid, full=True)
                except SentinelAPIError as e:
                    gs.error(_("{0}. UUID {1} skipped").format(e, uuid))
                    continue

                for k, v in odata.items():
                    if k == 'id':
                        k = 'uuid'
                    elif k == 'Sensing start':
                        k = 'beginposition'
                    elif k == 'Product type':
                        k = 'producttype'
                    elif k == 'Cloud cover percentage':
                        k = 'cloudcoverpercentage'
                    elif k == 'Identifier':
                        k = 'identifier'
                    elif k == 'Ingestion Date':
                        k = 'ingestiondate'
                    elif k == 'footprint':
                        pass
                    else:
                        continue
                    if k not in self._products_df_sorted:
                        self._products_df_sorted[k] = []
                    self._products_df_sorted[k].append(v)

    def filter_USGS(self, area, area_relation, clouds=None, producttype=None,
                    limit=None, query=None, start=None, end=None, sortby=None,
                    asc=True, relativeorbitnumber=None):
        # avoid mutable default arguments; sortby is modified in place below
        if query is None:
            query = {}
        if sortby is None:
            sortby = []
        if area_relation != 'Intersects':
            gs.fatal(_(
                "USGS Earth Explorer only supports area_relation"
                " 'Intersects'"))
        if relativeorbitnumber:
            gs.fatal(_(
                "USGS Earth Explorer does not support 'relativeorbitnumber'"
                " option."))
        if producttype and producttype != 'S2MSI1C':
            gs.fatal(_(
                "USGS Earth Explorer only supports producttype S2MSI1C"))
        if query:
            if not any(key in query for key in ['identifier', 'filename',
                                                'usgs_identifier']):
                gs.fatal(_(
                    "USGS Earth Explorer only supports query options"
                    " 'filename', 'identifier' or 'usgs_identifier'."))
            if 'usgs_identifier' in query:
                # get entityId from usgs identifier and directly save results
                usgs_id = query['usgs_identifier']
                check_s2l1c_identifier(usgs_id, source='usgs')
                # entity_id = self._api.lookup('SENTINEL_2A', [usgs_id],
                #                              inverse=True)
                entity_id = self._api.get_entity_id([usgs_id], 'SENTINEL_2A')
                self.get_products_from_uuid_usgs(entity_id)
                return
            else:
                if "filename" in query:
                    esa_id = query['filename'].replace('.SAFE', '')
                else:
                    esa_id = query['identifier']
                check_s2l1c_identifier(esa_id, source='esa')
                esa_prod_id = esa_id.split('_')[-1]
                utm_tile = esa_id.split('_')[-2]
                acq_date = esa_id.split('_')[2].split('T')[0]
                acq_date_string = '{0}-{1}-{2}'.format(
                    acq_date[:4], acq_date[4:6], acq_date[6:])
                start_date = end_date = acq_date_string
                # build the USGS style S2-identifier
                if utm_tile.startswith('T'):
                    utm_tile_base = utm_tile[1:]
                else:
                    utm_tile_base = utm_tile
                bbox = get_bbox_from_S2_UTMtile(utm_tile_base)
        else:
            # get coordinate pairs from wkt string
            str_1 = 'POLYGON(('
            str_2 = '))'
            coords = area[area.find(str_1)+len(str_1):area.rfind(str_2)].split(',')
            # WKT pairs are "lon lat"
            lons = [float(pair.strip().split(' ')[0]) for pair in coords]
            lats = [float(pair.strip().split(' ')[1]) for pair in coords]
            bbox = (min(lons), min(lats), max(lons), max(lats))
            start_date = start
            end_date = end
        usgs_args = {
            'dataset': 'SENTINEL_2A',
            'bbox': bbox,
            'start_date': start_date,
            'end_date': end_date
        }
        if clouds:
            usgs_args['max_cloud_cover'] = clouds
        if limit:
            usgs_args['max_results'] = limit
        scenes = self._api.search(**usgs_args)
        self._api.logout()
        if query:
            # keep only scenes from the requested UTM tile; removing items
            # while iterating over the list would skip elements
            scenes = [scene for scene in scenes
                      if scene['display_id'].split('_')[1] == utm_tile]
            # if two scenes remain, drop the one with the wrong product id
            if len(scenes) == 2:
                scenes = [scene for scene in scenes
                          if scene['display_id'].split('_')[-1] == esa_prod_id]
        if len(scenes) < 1:
            gs.message(_("No product found"))
            return
        scenes_df = pandas.DataFrame.from_dict(scenes)
        if sortby:
            # replace sortby keywords with USGS keywords
            for idx, keyword in enumerate(sortby):
                if keyword == 'cloudcoverpercentage':
                    sortby[idx] = 'cloud_cover'
                    # turn cloudcover to float to make it sortable
                    scenes_df['cloud_cover'] = pandas.to_numeric(
                        scenes_df['cloud_cover'])
                elif keyword == 'ingestiondate':
                    sortby[idx] = 'acquisition_date'
                # sorting by footprint is not meaningful for USGS results;
                # fall back to sorting by display_id
                elif keyword == 'footprint':
                    sortby[idx] = 'display_id'
            self._products_df_sorted = scenes_df.sort_values(
                sortby,
                ascending=[asc] * len(sortby), ignore_index=True
            )
        else:
            self._products_df_sorted = scenes_df
        gs.message(_("{} Sentinel product(s) found").format(
            len(self._products_df_sorted)))
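
The downloader class above opens before this excerpt, so its name and constructor are not shown here. A minimal usage sketch, with hypothetical names for everything that does not appear in the excerpt:

# Hypothetical usage sketch; the class name `SentinelDownloader` and its
# constructor arguments are assumptions, not taken from the code above.
downloader = SentinelDownloader(user='me', password='secret')
downloader.set_uuid(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])
downloader.list()
downloader.download(output='/tmp/sentinel', sleep=5, maxretry=3,
                    datasource='ESA_COAH')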
Code Example #32
0
File: sentinelWrapper.py Project: bart-lg/geocropper
class SentinelWrapper:
    def __init__(self):

        logger.info("connect to sentinel API")

        # connection to API for search queries and download requests
        self.api = SentinelAPI(config.copernicusUser, config.copernicusPW,
                               config.copernicusURL)

        logger.info("sentinel API connected")

    def get_sentinel_products(self, lat, lon, date_from, date_to, platform,
                              **kwargs):

        logger.info("start sentinel query")

        # convert geolocation coordinates to wkt format
        footprint = geojson_to_wkt(Point((lon, lat)))

        # prepare parameter for cloud coverage
        if "cloudcoverpercentage" in kwargs:
            kwargs["cloudcoverpercentage"] = (0,
                                              kwargs["cloudcoverpercentage"])

        # search query; retry on server failures, result stays None if
        # every attempt fails
        result = None
        for attempt in range(1, config.serverFailureRequestRepeats + 1):
            try:
                result = self.api.query(footprint,
                                        date=(date_from, date_to),
                                        platformname=platform,
                                        **kwargs)
                break
            except SentinelAPIError as e:
                print(repr(e))
                if attempt < config.serverFailureRequestRepeats:
                    print(
                        f"Attempt {attempt} failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    logger.info(
                        f"Attempt {attempt} to connect to Sentinel server failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    time.sleep(60 * config.serverFailureRequestDelay)
                else:
                    print("Last attempt failed. Aborting.")
                    logger.info(
                        "Last attempt to connect to Sentinel server failed. Aborting."
                    )

        logger.info("sentinel query complete")

        return result

    # download multiple sentinel products (list of product IDs)
    def download_sentinel_products(self, products):
        for attempt in range(1, config.serverFailureRequestRepeats + 1):
            try:
                logger.info("start downloading sentinel product list")
                self.api.download_all(products, config.bigTilesDir)
                logger.info("download complete")
                break
            except SentinelAPIError as e:
                print(repr(e))
                if attempt < config.serverFailureRequestRepeats:
                    print(
                        f"Attempt {attempt} failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    logger.info(
                        f"Attempt {attempt} to connect to Sentinel server failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    time.sleep(60 * config.serverFailureRequestDelay)
                else:
                    print("Last attempt failed. Aborting.")
                    logger.info(
                        "Last attempt to connect to Sentinel server failed. Aborting."
                    )

    # download sentinel product with certain product ID
    def download_sentinel_product(self, product_id):
        for attempt in range(1, config.serverFailureRequestRepeats + 1):
            try:
                logger.info("start downloading sentinel product")
                product_info = self.api.download(product_id,
                                                 config.bigTilesDir)
                if not product_info["Online"]:
                    logger.info("archived download triggered")
                    return False
                else:
                    # TODO: Download should be checked
                    logger.info("download complete")
                    return True
            except SentinelAPIError as e:
                print(repr(e))
                if attempt < config.serverFailureRequestRepeats:
                    print(
                        f"Attempt {attempt} failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    logger.info(
                        f"Attempt {attempt} to connect to Sentinel server failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    time.sleep(60 * config.serverFailureRequestDelay)
                else:
                    print("Last attempt failed. Aborting.")
                    logger.info(
                        "Last attempt to connect to Sentinel server failed. Aborting."
                    )

    def get_product_data(self, product_id):
        for attempt in range(1, config.serverFailureRequestRepeats + 1):
            try:
                return self.api.get_product_odata(product_id)
            except SentinelAPIError as e:
                print(repr(e))
                if attempt < config.serverFailureRequestRepeats:
                    print(
                        f"Attempt {attempt} failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    logger.info(
                        f"Attempt {attempt} to connect to Sentinel server failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    time.sleep(60 * config.serverFailureRequestDelay)
                else:
                    print("Last attempt failed. Aborting.")
                    logger.info(
                        "Last attempt to connect to Sentinel server failed. Aborting."
                    )

    def ready_for_download(self, product_id):
        for attempt in range(1, config.serverFailureRequestRepeats + 1):
            try:
                return self.api.is_online(product_id)
            except SentinelAPIError as e:
                print(repr(e))
                if attempt < config.serverFailureRequestRepeats:
                    print(
                        f"Attempt {attempt} failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    logger.info(
                        f"Attempt {attempt} to connect to Sentinel server failed. Next try in {config.serverFailureRequestDelay} minutes."
                    )
                    time.sleep(60 * config.serverFailureRequestDelay)
                else:
                    print("Last attempt failed. Aborting.")
                    logger.info(
                        "Last attempt to connect to Sentinel server failed. Aborting."
                    )

    def request_offline_tile(self, last_tile_download_request, product_id):

        # check that the last request is not within the request delay
        last_request = utils.minutes_since_last_download_request()
        if last_request is None or last_request > config.copernicusRequestDelay:

            if last_tile_download_request is None or \
                    utils.minutes_since_timestamp(last_tile_download_request) > config.copernicusRepeatRequestAfterMin:

                for attempt in range(1,
                                     config.serverFailureRequestRepeats + 1):
                    try:
                        # HTTP-Code 202: Accepted for retrieval
                        # TODO: handle other HTTP-Codes as well...
                        product_info = self.api.get_product_odata(product_id)
                        if self.api._trigger_offline_retrieval(
                                product_info["url"]) == 202:
                            return True
                        else:
                            return False
                    except SentinelAPIError as e:
                        print(repr(e))
                        if attempt < config.serverFailureRequestRepeats:
                            print(
                                f"Attempt {attempt} failed. Next try in {config.serverFailureRequestDelay} minutes."
                            )
                            logger.info(
                                f"Attempt {attempt} to connect to Sentinel server failed. Next try in {config.serverFailureRequestDelay} minutes."
                            )
                            time.sleep(60 * config.serverFailureRequestDelay)
                        else:
                            print("Last attempt failed. Aborting.")
                            logger.info(
                                "Last attempt to connect to Sentinel server failed. Aborting."
                            )

        else:
            return False
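
Every SentinelWrapper method above repeats the same retry loop around a single API call. A minimal sketch of how that pattern could be factored into a shared helper, assuming the same config attributes and imports as the example; this helper is an editorial illustration, not part of the geocropper project:

# Illustrative helper, not in the original project: wraps any API call in
# the retry-on-SentinelAPIError loop used throughout SentinelWrapper.
def retry_on_api_error(func, *args, **kwargs):
    for attempt in range(1, config.serverFailureRequestRepeats + 1):
        try:
            return func(*args, **kwargs)
        except SentinelAPIError as e:
            print(repr(e))
            if attempt < config.serverFailureRequestRepeats:
                print(f"Attempt {attempt} failed. Next try in "
                      f"{config.serverFailureRequestDelay} minutes.")
                time.sleep(60 * config.serverFailureRequestDelay)
            else:
                print("Last attempt failed. Aborting.")

# e.g. get_product_data above would then reduce to:
#     return retry_on_api_error(self.api.get_product_odata, product_id)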