예제 #1
0
    def __init__(self, username, password, archive, backup_archive=None):
        """Connect to the Sentinel API, optionally falling back to a backup.

        :param username: API account name
        :param password: API account password
        :param archive: primary archive URL
        :param backup_archive: optional fallback archive URL

        Raise ProcessorFailedError on failure.
        """
        from sentinelsat.sentinel import SentinelAPI, SentinelAPIError

        # remember settings for query()
        self.archive = archive
        self.backup_archive = backup_archive

        # connect API
        try:
            self.api = SentinelAPI(username, password, archive)
        except (SentinelAPIError, ConnectionError) as e:
            self.api = None
            # Keep a reference to the error: Python unbinds the `except ... as`
            # target when its handler exits, so the original reference to `e`
            # in the raise below hit NameError whenever the backup also failed.
            last_error = e
            if backup_archive:
                # re-try with backup archive
                Logger.error(
                    "Unable to connect {} ({}). Re-trying with {}...".format(
                        archive, e, backup_archive))
                try:
                    self.api = SentinelAPI(username, password, backup_archive)
                except (SentinelAPIError, ConnectionError) as e2:
                    self.api = None
                    last_error = e2

            if self.api is None:
                raise ProcessorFailedError(self,
                                           "Unable to connect: {}".format(last_error),
                                           set_status=False)

        Logger.debug("Sentinel API connected")
예제 #2
0
def test_format_url_custom_api_url():
    """The search URL is built correctly with and without a trailing slash."""
    for base_url in ('https://scihub.copernicus.eu/dhus/',
                     'https://scihub.copernicus.eu/dhus'):
        api = SentinelAPI("user", "pw", api_url=base_url)
        assert api._format_url().startswith(
            'https://scihub.copernicus.eu/dhus/search')
def run_download_from_pool(product_id, out_dir, username, password):
    """Download one product (worker function for a process pool).

    :param product_id: scihub product UUID
    :param out_dir: target download directory
    :param username: scihub user
    :param password: scihub password
    :return: the sentinelsat product-info dict, or 0 on connection error
    """
    url = 'https://scihub.copernicus.eu/dhus'
    d_api = SentinelAPI(username, password, url)
    try:
        download_info = d_api.download(product_id, directory_path=out_dir)
    except Exception:  # narrowed from a bare except: (kept KeyboardInterrupt alive)
        print('Server connection error')
        return 0
    zip_path = os.path.join(out_dir, download_info['title'] + '.zip')
    if os.path.exists(zip_path):
        # leave an .ok marker so callers can detect the finished download
        os.mknod(os.path.join(out_dir, download_info['title'] + '.ok'))
        print('data_downloaded')
    elif download_info['Online']:
        print('online_but_not_downloaded')
    else:
        # offline product: ask the long-term archive to bring it online
        retrieval_code = d_api._trigger_offline_retrieval(download_info['url'])
        # check https://scihub.copernicus.eu/userguide/LongTermArchive#HTTP_Status_codes
        if retrieval_code == 202:
            print("offline product retrieval accepted")
        elif retrieval_code == 403:
            print("offline product requests exceed quota")
        elif retrieval_code == 503:
            print("offline product retrieval not accepted")
    return download_info
def run_download(product_id, out_dir, username, password):
    """Download a product and return a status/retrieval code.

    :param product_id: mapping with the product 'uuid'
    :param out_dir: target directory (used as a raw path prefix)
    :return: 1 when the zip arrived on disk, 0 on connection error or when
        the product was online but not downloaded, otherwise the HTTP
        status code of the LTA retrieval request.
    """
    url = 'https://scihub.copernicus.eu/dhus'
    d_api = SentinelAPI(username, password, url)
    try:
        download_info = d_api.download(product_id['uuid'], directory_path=out_dir)
    except Exception:  # narrowed from a bare except:
        print('Server connection error')
        return 0

    # Default value: the original left retrievel_code unbound on the
    # "online but not downloaded" branch, raising UnboundLocalError at return.
    retrievel_code = 0
    if os.path.exists(out_dir + download_info['title'] + '.zip'):
        os.mknod(out_dir + download_info['title'] + '_data_downloaded.ok')
        print('data_downloaded')
        retrievel_code = 1
    elif download_info['Online']:
        print('online_but_not_downloaded')
    else:
        # offline product: trigger LTA retrieval
        retrievel_code = d_api._trigger_offline_retrieval(download_info['url'])
        # check https://scihub.copernicus.eu/userguide/LongTermArchive#HTTP_Status_codes
        if retrievel_code == 202:
            print("offline product retrieval accepted")
        elif retrievel_code == 403:
            print("offline product requests exceed quota")
        elif retrievel_code == 503:
            print("offline product retrieval not accepted")
    return retrievel_code
def retrieve_mata_data(filename, username, password):
    """Query scihub metadata for a Sentinel-1 product file name.

    The name is split on '_' to recover mode (info[1]), product type
    (info[2]), acquisition date (info[5]) and orbit number (info[7]).

    NOTE(review): the date window is computed by integer arithmetic on a
    YYYYMMDD string, which breaks across month boundaries — kept as-is,
    confirm before relying on it.

    :return: the matching row of the result dataframe (the last product
        whose title appears in *filename*).
    """
    url = 'https://scihub.copernicus.eu/dhus'
    info = filename.split('_')
    mode = info[1]
    product = info[2]
    # np.int was removed in NumPy 1.24; it was an alias of the builtin int.
    orbitnumber = int(info[7])
    time_start = int(info[5].split('T')[0]) - 1
    time_end = str(time_start + 2)
    time_start = str(time_start)

    api = SentinelAPI(username, password, url)
    products = api.query(
                         beginposition=(time_start, time_end),
                         platformname='Sentinel-1',
                         producttype=product,
                         sensoroperationalmode=mode,
                         polarisationmode='VV VH',
                         orbitnumber=orbitnumber
                         )

    products_df = api.to_dataframe(products)
    # keep the LAST matching title (original behavior)
    index = -1
    for i in range(len(products_df)):
        if products_df['title'][i] in filename:
            index = i

    return products_df.iloc[index]
예제 #6
0
 def Sen2Download(self, dprofile):
     """Search Sentinel-2 L1C products for the profile's AOI and date
     range, then hand the results to DownloadProducts.

     :param dprofile: download profile with credentials, AOI shapefile,
         download directory and search window (daysdiff)
     """
     download_profile_args = [
         dprofile.username,
         dprofile.password,
         dprofile.daysdiff,
         dprofile.shape_file_path,
         dprofile.download_dir,
         dprofile.concurrency,
     ]
     username, password, daysdiff, shape_file, directory_path, concurrency = download_profile_args
     # Fixed: the original log message said "Sentinel-1" for an S2 download.
     logger.info(
         f'Sentinel-2 Downloads starting with dprofile = {dprofile}')
     api = SentinelAPI(username, password,
                       'https://scihub.copernicus.eu/dhus')
     # shapefile to WKT footprint
     footprint = geojson_to_wkt(read_geojson(shape_file))
     # dates to search
     end_date = datetime.datetime.now()
     daysdiff = datetime.timedelta(days=daysdiff)
     start_date = end_date - daysdiff
     # Search for data
     products = api.query(footprint,
                          date=(start_date, end_date),
                          platformname='Sentinel-2',
                          producttype='S2MSI1C',
                          cloudcoverpercentage=(0, 30))
     # Fixed: DownloadProducts is called on self (already bound), so the
     # original passed self twice — one extra positional argument.
     self.DownloadProducts(products, dprofile)
예제 #7
0
def downloadProduct(data):
    """Download one product and track its state in the Downloads model.

    :param data: tuple (product uuid, target directory, username, password)
    """
    index, directory_path, username, password = data
    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')
    # mark the row as started; then re-fetch it for the status updates below
    Downloads.objects.filter(product_id=index).update(
        start_time=datetime.datetime.now(), status='started')
    download_data = Downloads.objects.get(product_id=index)
    logger.info(f'Downloading product with product_id {index} Started')
    try:
        api.download(index, directory_path=directory_path, checksum=True)
        download_data.status = 'finished'
    except Exception as e:
        # The original had a second `except AttributeError` clause after this
        # one; AttributeError is a subclass of Exception, so that clause was
        # unreachable dead code duplicating this handler — removed.
        logger.exception(
            f'Exception occured {e} while downloading product with   product_id {index}'
        )
        download_data.status = 'error'
    # both outcomes record the end time and persist the row
    download_data.end_time = datetime.datetime.now()
    download_data.save()
    logger.info(f'Downloading product with product_id {index} Finished')
예제 #8
0
def download_s1(user, password, dir_raw, dir_nc, start_date, end_date,
                footprint):
    """Download Sentinel-1 GRD products intersecting *footprint*.

    For each hit, write its footprint WKT once under <dir_nc>/../wkt/ and
    download the raw product to *dir_raw* unless the derived NetCDF
    (<title>_VV.nc) already exists in *dir_nc*.
    """
    api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus/')

    #footprint = "POLYGON((73 11, 74 11, 74 14, 73 14, 73 11))"
    products = api.query(footprint,
                         date=(start_date, end_date),
                         producttype='GRD')

    for product in products:
        productInfo = api.get_product_odata(product)
        title = productInfo['title']

        print(title)
        file_nc = os.path.join(dir_nc, "%s_VV.nc" % title)
        file_wkt = os.path.join(os.path.dirname(dir_nc), 'wkt',
                                "%s.wkt" % title)

        if not os.path.exists(file_wkt):
            # `with` closes the handle even on write errors; the original
            # used open()/close() and shadowed the builtin name `file`.
            with open(file_wkt, "a") as wkt_file:
                wkt_file.write(productInfo['footprint'])
        if not os.path.exists(file_nc):
            api.download(product, dir_raw, checksum=True)
예제 #9
0
def test_download_all(tmpdir):
    """Integration test for SentinelAPI.download_all.

    Downloads three small WV_OCN products, verifies the files on disk,
    then serves a corrupted checksum for one of them via a partially
    mocked OData endpoint and checks it is reported as failed (None).
    """
    api = SentinelAPI(environ['SENTINEL_USER'], environ['SENTINEL_PASSWORD'])
    # From https://scihub.copernicus.eu/apihub/odata/v1/Products?$top=5&$orderby=ContentLength
    filenames = [
        "S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E",
        "S1A_WV_OCN__2SSV_20150526T211029_20150526T211737_006097_007E78_134A",
        "S1A_WV_OCN__2SSV_20150526T081641_20150526T082418_006090_007E3E_104C"
    ]

    api.query_raw(" OR ".join(filenames))
    assert len(api.get_products()) == len(filenames)

    # Download normally
    result = api.download_all(str(tmpdir))
    assert len(result) == len(filenames)
    for path, product_info in result.items():
        pypath = py.path.local(path)
        assert pypath.purebasename in filenames
        assert pypath.check(exists=1, file=1)
        assert pypath.size() == product_info["size"]

    # Force one download to fail: delete the file and mock a bogus checksum
    # on its OData record (real_http=True passes everything else through).
    path, product_info = list(result.items())[0]
    py.path.local(path).remove()
    with requests_mock.mock(real_http=True) as rqst:
        url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('%s')/?$format=json" % product_info[
            "id"]
        json = api.session.get(url).json()
        json["d"]["Checksum"]["Value"] = "00000000000000000000000000000000"
        rqst.get(url, json=json)
        result = api.download_all(str(tmpdir), max_attempts=1, checksum=True)
        assert len(result) == len(filenames)
        # the corrupted product must be reported as failed
        assert result[path] is None
예제 #10
0
    def download_error_image(img_date, geo_img, img_id, username, password):
        '''Re-query a COG Sentinel-2 image that previously failed to download.

        After reading the error file (image_error.txt) you can get the info
        for the image that failed, and pass it to this function; for more
        than one image, call it in a loop.  img_date, geo_img and img_id all
        come from image_error.txt.  username and password must be strings.

        Usage:
            api, target_image_id = download_error_image(img_date, geo_img,
                                                        img_id, username,
                                                        password)
            api.download(target_image_id, directory_path='.')

        If you get an error like "Product ... is not online. Triggering
        retrieval from long term archive", see
        https://sentinelsat.readthedocs.io/en/stable/api.html#lta-products

        Returns the API handle and the index of the matching product.
        '''
        api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')
        # search a one-day window either side of the acquisition date
        one_day = datetime.timedelta(days=1)
        footprint = geojson_to_wkt(geo_img)
        products = api.query(footprint,
                             date=(img_date - one_day, img_date + one_day),
                             platformname='Sentinel-2',
                             )
        frame = api.to_geodataframe(products)
        match = frame.loc[frame['title'] == img_id]
        return api, match.index.values[0]
예제 #11
0
    def __init__(self, location):
        """Build the per-location options, the ESA SQL layer and the API client.

        :param location: configuration location passed through to Options
        """
        # per-location configuration drives everything created below
        self.options = Options(location)
        self.sql = Esa_Sql(self.options)
        # NOTE(review): no api_url is given, so the sentinelsat default
        # endpoint is used — confirm that is intended.
        self.api = SentinelAPI(self.options.user, self.options.password)
        # products found by later queries accumulate here
        self.products = []

        check_and_clean_log_file()
예제 #12
0
def downloadSentinel(user, pw, aoi, start, stop):
    """Query Sentinel-1 GRD scenes over a KML AOI, dump their footprints to
    a GeoJSON file, then interactively download and unzip one image.

    NOTE(review): this is Python 2 code (raw_input; the Windows path
    literal 'C:\\Users\\...' is a SyntaxError under Python 3 because of
    the \\U escape) — port before reuse.
    """
    # warm up / validate the TLS connection before using the API
    curl = pycurl.Curl()
    curl.setopt(pycurl.CAINFO, certifi.where())
    curl.setopt(pycurl.URL, 'https://scihub.copernicus.eu/dhus')
    curl.perform()
    # For image before November 16th, 2015
    api = SentinelAPI(user, pw, 'https://scihub.copernicus.eu/dhus')
    AOI = KMLtoGeoJason.kml2geojson(aoi)
    api.query(get_coordinates(AOI), start, stop, producttype='GRD')
# footprint generation of all found images:
    a=api.get_footprints()
    # NOTE(review): [:-8] presumably strips a '.geojson' suffix from the
    # converted AOI file name — confirm against kml2geojson's return value.
    name = AOI[:-8]+"_S1footprint.geojson"
    foot = open(name, "w")
    foot.write(dumps(a, indent=2) + "\n")
    foot.close()
##
##    with open(name) as f:
##      contents = f.read()
##      display(contents)
# selected image download and unzip:
    imageId = raw_input("Insert Sentinel-1 image id: ")
    output_img = 'C:\Users\ithaca\Documents\Magda\Tool_MIE\SENTINEL-1_TOOL\Immagini_grandi'
    s1 = api.download(imageId, output_img)
    path = os.path.dirname(s1)
    # unzip next to the downloaded archive
    with zipfile.ZipFile(s1, "r") as z:
        z.extractall(path)
예제 #13
0
def get_products(login_json, coordinates, date_start, date_end, download_dir):
    """Search Sentinel-2 scenes over a polygon, download them all, and
    return the scene metadata (with footprints) as a GeoDataFrame.

    :param login_json: path to a JSON file whose two values are user/password
    :param coordinates: polygon coordinates for the area of interest
    :param date_start: search window start
    :param date_end: search window end
    :param download_dir: directory that receives the downloads
    """
    with open(login_json, 'r') as fp:
        credentials = json.load(fp)
    user, password = list(credentials.values())

    # connect to the API
    api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus')

    # map polygon -> WKT footprint
    footprint = geojson_to_wkt(Polygon(coordinates=coordinates))

    # search by polygon, time, and Hub query keywords
    products = api.query(
        footprint,
        date=(date_start, date_end),
        platformname='Sentinel-2',
        # producttype='S2MSI2A',
        area_relation='Intersects',  # area of interest is inside footprint
        cloudcoverpercentage=(0, 40))

    # download all results from the search
    api.download_all(products, directory_path=download_dir)

    # GeoPandas GeoDataFrame with the metadata of the scenes
    return api.to_geodataframe(products)
예제 #14
0
def test_small_query():
    """A one-day query produces the expected solr query string."""
    api = SentinelAPI(**_api_kwargs)
    api.query(**_small_query)
    expected = (
        '(beginPosition:[2015-01-01T00:00:00Z TO 2015-01-02T00:00:00Z]) '
        'AND (footprint:"Intersects(POLYGON((0 0,1 1,0 1,0 0)))")')
    assert api.last_query == expected
    assert api.last_status_code == 200
예제 #15
0
def test_to_geopandas():
    """Query results convert to a GeoDataFrame without error."""
    api = SentinelAPI(**_api_auth)
    gdf = api.to_geodataframe(
        api.query(get_coordinates('tests/map.geojson'),
                  "20151219",
                  "20151228",
                  platformname="Sentinel-2"))
예제 #16
0
def test_large_query():
    """A year-long query builds the right query string and pages past max_rows."""
    api = SentinelAPI(**_api_kwargs)
    api.query(**_large_query)
    expected = (
        '(beginPosition:[2015-01-01T00:00:00Z TO 2015-12-31T00:00:00Z]) '
        'AND (footprint:"Intersects(POLYGON((0 0,0 10,10 10,10 0,0 0)))")')
    assert api.last_query == expected
    assert api.last_status_code == 200
    assert len(api.products) > api.max_rows
예제 #17
0
def test_format_url():
    """format_url stores and returns the paginated apihub search URL."""
    api = SentinelAPI(**_api_kwargs)
    start_row = 0
    url = api.format_url(start_row=start_row)

    expected = (
        'https://scihub.copernicus.eu/apihub/search'
        '?format=json&rows={rows}&start={start}').format(rows=api.max_rows,
                                                         start=start_row)
    assert url is api.url
    assert api.url == expected
예제 #18
0
def test_SentinelAPI_connection():
    """A basic query hits the apihub endpoint and succeeds."""
    api = SentinelAPI(environ['SENTINEL_USER'], environ['SENTINEL_PASSWORD'])
    api.query('0 0,1 1,0 1,0 0', datetime(2015, 1, 1), datetime(2015, 1, 2))

    expected_query = (
        '(beginPosition:[2015-01-01T00:00:00Z TO 2015-01-02T00:00:00Z]) '
        'AND (footprint:"Intersects(POLYGON((0 0,1 1,0 1,0 0)))")')
    assert api.url == 'https://scihub.copernicus.eu/apihub/search?format=json&rows=15000'
    assert api.last_query == expected_query
    assert api.content.status_code == 200
예제 #19
0
def test_to_pandas():
    """Query results convert to a DataFrame indexed by product title."""
    api = SentinelAPI(**_api_auth)
    df = api.to_dataframe(
        api.query(get_coordinates('tests/map.geojson'),
                  "20151219",
                  "20151228",
                  platformname="Sentinel-2"))
    expected_title = ('S2A_OPER_PRD_MSIL1C_PDMC_20151228T112701_R110_'
                      'V20151227T142229_20151227T142229')
    assert expected_title in df.index
예제 #20
0
def download_scenes(period):
    """Build the Sentinel-1 mosaic for *period*: query, download, process
    and compose GRD scenes over the AOI, skipping work already done.

    :param period: object with date_from / date_to attributes
    """
    date_from = period.date_from
    date_to = period.date_to

    # Check if result has already been done
    scene_filename = 's1_{dfrom}_{dto}.tif'.format(
        dfrom=period.date_from.strftime('%Y%m'),
        dto=period.date_to.strftime('%Y%m'))
    scene_path = os.path.join(RESULTS_PATH, scene_filename)
    if os.path.exists(scene_path):
        print(
            "Sentinel-1 mosaic for period {}-{} already done:".format(
                date_from, date_to), scene_path)
        return

    # Prepare API client for download
    api = SentinelAPI(settings.SCIHUB_USER, settings.SCIHUB_PASS,
                      settings.SCIHUB_URL)

    # Query ascending VV/VH GRD scenes intersecting the AOI
    footprint = geojson_to_wkt(read_geojson(AOI_PATH))
    products = api.query(footprint,
                         date=(date_from, date_to),
                         platformname='Sentinel-1',
                         producttype='GRD',
                         polarisationmode='VV VH',
                         orbitdirection='ASCENDING')

    for k, p in products.items():
        print((k, p['summary']))

    os.makedirs(S1_RAW_PATH, exist_ok=True)

    # Filter already downloaded products (zip already present on disk)
    products_to_download = {
        k: v
        for k, v in products.items() if not os.path.exists(
            os.path.join(S1_RAW_PATH, '{}.zip'.format(v['title'])))
    }

    # Download products
    results = api.download_all(products_to_download,
                               directory_path=S1_RAW_PATH)
    products = list(products.values())

    # Process the images of each product in parallel
    with mp.Pool(settings.S1_PROC_NUM_JOBS) as pool:
        pool.map(process_product, products)

    # Create a median composite from all images of each band, generate extra
    # bands and concatenate results into a single multiband image.
    superimpose(products)
    median(products, period)
    generate_vvvh(period)
    concatenate_results(period)
    clip_result(period)

    clean_temp_files(period)
예제 #21
0
def s5down(datum=None, product_type='no2'):
    """Download Sentinel-5P data of *product_type* for one day and build a
    merged, cropped GeoTIFF for the CZ extents.

    e.g. s5down('2019-08-15', 'no2')

    :param datum: date string 'YYYY-MM-DD'; defaults to today.  (The
        original default str(dt.date.today()) was evaluated once at import
        time, freezing the date for the life of the process.)
    :param product_type: 'no2' or 'aerosols'
    :return: list of per-product GeoTIFF paths, or None on failure
    :raises ValueError: for an unknown product_type (the original fell
        through with `strtype` unbound, raising UnboundLocalError later)
    """
    if datum is None:
        datum = str(dt.date.today())
    if product_type == 'no2':
        strtype = 'L2__NO2___'
    elif product_type == 'aerosols':
        strtype = 'L2__AER_AI'
    else:
        raise ValueError('unknown product_type: {}'.format(product_type))
    datum = dt.datetime.strptime(datum, '%Y-%m-%d').date()
    time_in = dt.datetime.combine(datum, dt.time(0, 0))
    time_out = dt.datetime.combine(datum, dt.time(23, 59))
    api = SentinelAPI('s5pguest', 's5pguest',
                      'https://s5phub.copernicus.eu/dhus')
    #coordinates for CZ:
    footprint = 'POLYGON((12.278971773041526 48.69059060056844,18.98957262575027 48.69059060056844,18.98957262575027 51.081759060281655,12.278971773041526 51.081759060281655,12.278971773041526 48.69059060056844))'
    products = api.query(footprint,
                         date=(time_in, time_out),
                         platformname='Sentinel-5',
                         producttype=strtype)
    print('there are ' + str(len(products)) + ' products found')
    a = api.download_all(products)
    # convert every downloaded product to a geotiff
    geotiffs = []
    for product_ID in iter(products):
        product_path = a[0][product_ID]['path']
        print('converting ' + product_path + ' to geotiff')
        geotiffs.append(s5p2geotiff(product_path, product_type))
    if not geotiffs:
        print('some error happened, no geotiffs generated')
        clean_downloaded(products, a)
        return None
    tifstring = ''
    for tif in geotiffs:
        tifstring = tifstring + ' ' + tif
    print('merging geotiffs to ' + str(datum) +
          '.tif and cropping for CZ extents')
    outfile = str(datum) + '.' + product_type + '.tif'
    tmpfile = 'tmp.tif'
    os.system(
        'gdal_merge.py -o merged.tif -of GTiff -ul_lr 11.3867 51.4847 19.943 47.7933 -a_nodata 9999 '
        + tifstring)
    if product_type == 'no2':
        #need to compute 1000x
        gdal_calc = 'gdal_calc.py -A merged.tif --outfile=' + tmpfile + ' --calc="(A*1000 > 0)*(A * 1000 < 0.7)*(A * 1000)" --overwrite'
        print(gdal_calc)
        os.system(gdal_calc)
    else:
        tmpfile = 'merged.tif'
    #now oversample using cubic..
    gdalwarp = 'gdalwarp -tr 0.015 0.015 -r cubicspline -dstnodata 9999 -srcnodata 9999 ' + tmpfile + ' ' + outfile
    #gdalwarp = 'gdalwarp -s_srs EPSG:4326 -t_srs EPSG:4326 -tr 0.015 0.015 -r cubicspline -dstnodata 9999 -srcnodata 9999 temp1000.tif '+outfile
    print(gdalwarp)
    os.system(gdalwarp)
    print('(the file will be also saved as {}.tif)'.format(product_type))
    copyfile(outfile, '../data/' + product_type + '.tif')
    #cleaning
    clean_downloaded(products, a)
    return geotiffs
예제 #22
0
def test_SentinelAPI_wrong_credentials():
    """Bad credentials raise SentinelAPIError with HTTP 401."""
    api = SentinelAPI("wrong_user", "wrong_password")
    with pytest.raises(SentinelAPIError) as excinfo:
        api.query('0 0,1 1,0 1,0 0', datetime(2015, 1, 1),
                  datetime(2015, 1, 2))
    assert excinfo.value.http_status == 401

    # The original put both calls inside one `raises` block, so the second
    # call never executed; assert each one separately.
    with pytest.raises(SentinelAPIError):
        api.get_products_size()
    with pytest.raises(SentinelAPIError):
        api.get_products()
예제 #23
0
def test_SentinelAPI_connection():
    """A small query targets the apihub search URL and succeeds."""
    api = SentinelAPI(**_api_auth)
    api.query(**_small_query)

    url_prefix = ('https://scihub.copernicus.eu/apihub/search'
                  '?format=json&rows={rows}').format(rows=api.max_rows)
    assert api.url.startswith(url_prefix)
    expected_query = (
        '(beginPosition:[2015-01-01T00:00:00Z TO 2015-01-02T00:00:00Z]) '
        'AND (footprint:"Intersects(POLYGON((0 0,1 1,0 1,0 0)))")')
    assert api.last_query == expected_query
    assert api.last_status_code == 200
예제 #24
0
def test_to_dict():
    """to_dict returns a plain dict keyed by product title."""
    api = SentinelAPI(**_api_auth)
    products = api.query(get_coordinates('tests/map.geojson'),
                         "20151219",
                         "20151228",
                         platformname="Sentinel-2")
    dictionary = api.to_dict(products)
    # the result must be a real dict ...
    assert isinstance(dictionary, dict)
    # ... and contain the known scene title as a key
    expected_key = ('S2A_OPER_PRD_MSIL1C_PDMC_20151228T112701_R110_'
                    'V20151227T142229_20151227T142229')
    assert expected_key in dictionary
예제 #25
0
def test_s2_cloudcover():
    """Cloud-cover filtering returns exactly the three known low-cloud scenes."""
    api = SentinelAPI(**_api_auth)
    products = api.query(get_coordinates('tests/map.geojson'),
                         "20151219",
                         "20151228",
                         platformname="Sentinel-2",
                         cloudcoverpercentage="[0 TO 10]")
    expected_ids = [
        "6ed0b7de-3435-43df-98bf-ad63c8d077ef",
        "37ecee60-23d8-4ec2-a65f-2de24f51d30e",
        "0848f6b8-5730-4759-850e-fc9945d42296",
    ]
    assert len(products) == len(expected_ids)
    for position, expected_id in enumerate(expected_ids):
        assert products[position]["id"] == expected_id
예제 #26
0
def test_download(tmpdir):
    """Integration test for SentinelAPI.download: normal download, skip of
    existing files, resume of partial files, re-download of invalid files,
    and checksum-failure detection.
    """
    api = SentinelAPI(environ['SENTINEL_USER'], environ['SENTINEL_PASSWORD'])
    uuid = "1f62a176-c980-41dc-b3a1-c735d660c910"
    filename = "S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E"
    expected_path = tmpdir.join(filename + ".zip")

    # Download normally
    path, product_info = api.download(uuid, str(tmpdir), checksum=True)
    assert expected_path.samefile(path)
    assert product_info["id"] == uuid
    assert product_info["title"] == filename
    assert product_info["size"] == expected_path.size()

    # baseline hash/mtime used by all later "no modification" checks
    hash = expected_path.computehash()
    modification_time = expected_path.mtime()
    expected_product_info = product_info

    # File exists, test with checksum
    # Expect no modification
    path, product_info = api.download(uuid, str(tmpdir), check_existing=True)
    assert expected_path.mtime() == modification_time
    assert product_info == expected_product_info

    # File exists, test without checksum
    # Expect no modification
    path, product_info = api.download(uuid, str(tmpdir), check_existing=False)
    assert expected_path.mtime() == modification_time
    assert product_info == expected_product_info

    # Create invalid file (right size, wrong content), expect re-download
    with expected_path.open("wb") as f:
        f.seek(expected_product_info["size"] - 1)
        f.write(b'\0')
    assert expected_path.computehash("md5") != hash
    path, product_info = api.download(uuid, str(tmpdir), check_existing=True)
    assert expected_path.computehash("md5") == hash
    assert product_info == expected_product_info

    # Test continue: truncate the file and expect a resumed download
    with expected_path.open("rb") as f:
        content = f.read()
    with expected_path.open("wb") as f:
        f.write(content[:100])
    assert expected_path.computehash("md5") != hash
    path, product_info = api.download(uuid, str(tmpdir), check_existing=True)
    assert expected_path.computehash("md5") == hash
    assert product_info == expected_product_info

    # Test MD5 check: corrupt the file and expect InvalidChecksumError
    with expected_path.open("wb") as f:
        f.write(b'abcd' * 100)
    assert expected_path.computehash("md5") != hash
    with pytest.raises(InvalidChecksumError):
        api.download(uuid, str(tmpdir), check_existing=True, checksum=True)
예제 #27
0
def download_s2(user, password, dir_raw, dir_nc, start_date, end_date, footprint, pr_status):
    """Download Sentinel-2 L1C products over *footprint*, skipping a
    hard-coded blacklist and tiles already marked done in *pr_status*.

    :param pr_status: dict keyed by '<tile>_<time>'; False means pending.
        Updated in place and returned.
    """
    api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus/')

    #footprint = "POLYGON((73 11, 74 11, 74 14, 73 14, 73 11))"
    products = api.query(footprint, date=(start_date, end_date),
                         producttype='S2MSI1C', cloudcoverpercentage=(0, 20))

    # products already handled elsewhere — skip these titles
    l = ['S2A_MSIL1C_20180601T051651_N0206_R062_T43PFN_20180601T082308', 'S2A_MSIL1C_20180621T051651_N0206_R062_T43PFN_20180621T081647', 'S2B_MSIL1C_20180613T050649_N0206_R019_T43PFN_20180613T084228',
         'S2A_MSIL1C_20180601T051651_N0206_R062_T43PFP_20180601T082308', 'S2A_MSIL1C_20180621T051651_N0206_R062_T43PFP_20180621T081647', 'S2B_MSIL1C_20180613T050649_N0206_R019_T43PFP_20180613T084228',
         'S2A_MSIL1C_20180608T050651_N0206_R019_T43PFN_20180608T084904', 'S2A_MSIL1C_20180628T050651_N0206_R019_T43PFN_20180628T081023', 'S2B_MSIL1C_20180616T051649_N0206_R062_T43PFN_20180616T090733',
         'S2A_MSIL1C_20180608T050651_N0206_R019_T43PFP_20180608T084904', 'S2A_MSIL1C_20180628T050651_N0206_R019_T43PFP_20180628T081023', 'S2B_MSIL1C_20180616T051649_N0206_R062_T43PFP_20180616T090733',
         'S2A_MSIL1C_20180611T051651_N0206_R062_T43PFN_20180611T081245', 'S2B_MSIL1C_20180603T050649_N0206_R019_T43PFN_20180603T084545', 'S2B_MSIL1C_20180623T050649_N0206_R019_T43PFN_20180623T084444',
         'S2A_MSIL1C_20180611T051651_N0206_R062_T43PFP_20180611T081245', 'S2B_MSIL1C_20180603T050649_N0206_R019_T43PFP_20180603T084545', 'S2B_MSIL1C_20180623T050649_N0206_R019_T43PFP_20180623T084444',
         'S2A_MSIL1C_20180618T050651_N02206_R019_T43PFN_20180618T085607', 'S2B_MSIL1C_20180606T051649_N0206_R062_T43PFN_20180606T104751', 'S2B_MSIL1C_20180626T051649_N0206_R062_T43PFN_20180626T090058',
         'S2A_MSIL1C_20180618T050651_N0206_R019_T43PFP_20180618T085607', 'S2B_MSIL1C_20180606T051649_N0206_R062_T43PFP_20180606T104751', 'S2B_MSIL1C_20180626T051649_N0206_R062_T43PFP_20180626T090058']

    for product in products:
        productInfo = api.get_product_odata(product)
        title = productInfo['title']

        if title in l:
            continue

        tileNo_time = '%s_%s' % (title.split('_')[5], title.split('_')[2])

        try:
            downloadFlag = not pr_status[tileNo_time]
        except KeyError:
            pr_status[tileNo_time] = False
            downloadFlag = True
            # the original used the Python 2 print statement here
            print("no error")

        file_wkt = os.path.join(os.path.dirname(dir_nc), "wkt/%s.wkt" % tileNo_time)

        # write the footprint WKT once per tile
        if not os.path.exists(file_wkt):
            with open(file_wkt, "a") as wkt_file:
                wkt_file.write(productInfo['footprint'])

        if downloadFlag and title not in l:
            api.download(product, dir_raw, checksum=True)
            l.append(title)

    # Fixed: the original returned inside the for loop, so only the first
    # non-blacklisted product was ever processed.
    return pr_status
예제 #28
0
def test_footprints_s2():
    """Footprints of an S2 query match the stored GeoJSON fixture."""
    api = SentinelAPI(environ['SENTINEL_USER'], environ['SENTINEL_PASSWORD'])
    api.query(get_coordinates('tests/map.geojson'),
              "20151219",
              "20151228",
              platformname="Sentinel-2")

    with open('tests/expected_search_footprints_s2.geojson',
              'r') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # compare as sets: footprint ordering is not significant
    assert set(api.get_footprints()) == set(expected_footprints)
예제 #29
0
def test_footprints_s1():
    """Footprints of an S1 GRD query match the stored GeoJSON fixture."""
    api = SentinelAPI(environ['SENTINEL_USER'], environ['SENTINEL_PASSWORD'])
    api.query(get_coordinates('tests/map.geojson'),
              datetime(2014, 10, 10),
              datetime(2014, 12, 31),
              producttype="GRD")

    with open('tests/expected_search_footprints_s1.geojson',
              'r') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # compare as sets: footprint ordering is not significant
    assert set(api.get_footprints()) == set(expected_footprints)
예제 #30
0
def test_trail_slash_base_url():
    """Base URLs are normalized to always end with a trailing slash."""
    expected = 'https://scihub.copernicus.eu/dhus/'

    for test_url in ('https://scihub.copernicus.eu/dhus/',
                     'https://scihub.copernicus.eu/dhus'):
        # both the helper and the constructed client must normalize
        assert SentinelAPI._url_trail_slash(test_url) == expected
        api = SentinelAPI("mock_user", "mock_password", test_url)
        assert api.api_url == expected