Example #1
import json

from geojson import Polygon
from sentinelsat import SentinelAPI, geojson_to_wkt


def get_products(login_json, coordinates, date_start, date_end, download_dir):
    with open(login_json, 'r') as fp:
        LOGIN_INFO = json.load(fp)
    # relies on JSON key order: the username value must come before the password
    USER_NAME, PASSWORD = list(LOGIN_INFO.values())

    # connect to the API
    api = SentinelAPI(USER_NAME, PASSWORD, 'https://scihub.copernicus.eu/dhus')

    # define the area of interest as a polygon and convert it to WKT
    footprint = geojson_to_wkt(Polygon(coordinates=coordinates))
    dates = (date_start, date_end)  # e.g. (date(2018, 4, 1), date(2018, 4, 11))

    # search by polygon, time, and Hub query keywords
    products = api.query(
        footprint,
        date=dates,
        platformname='Sentinel-2',
        # producttype='S2MSI2A',
        area_relation='Intersects',  # scene footprint must intersect the area of interest
        cloudcoverpercentage=(0, 40))

    # download all results from the search
    api.download_all(products, directory_path=download_dir)
    # product_id = list(products.keys())[0]
    # api.download(id=product_id, directory_path=download_dir)

    # GeoPandas GeoDataFrame with the metadata of the scenes and the footprints as geometries
    return api.to_geodataframe(products)
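
A minimal call sketch for this helper follows; the file name, coordinate ring and date range are placeholders, and the credentials file is assumed to hold the username value before the password value, since the function unpacks LOGIN_INFO.values() positionally.

# Hypothetical usage of get_products(); all literal values are placeholders.
from datetime import date

# login.json is assumed to look like {"username": "...", "password": "..."}
aoi = [[(12.28, 48.69), (18.99, 48.69), (18.99, 51.08), (12.28, 51.08), (12.28, 48.69)]]
gdf = get_products('login.json', aoi, date(2018, 4, 1), date(2018, 4, 11), './downloads')
print(gdf['title'])  # scene titles from the returned GeoDataFrame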
Example #2
def test_download_all(tmpdir):
    api = SentinelAPI(environ['SENTINEL_USER'], environ['SENTINEL_PASSWORD'])
    # From https://scihub.copernicus.eu/apihub/odata/v1/Products?$top=5&$orderby=ContentLength
    filenames = [
        "S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E",
        "S1A_WV_OCN__2SSV_20150526T211029_20150526T211737_006097_007E78_134A",
        "S1A_WV_OCN__2SSV_20150526T081641_20150526T082418_006090_007E3E_104C"
    ]

    api.query_raw(" OR ".join(filenames))
    assert len(api.get_products()) == len(filenames)

    # Download normally
    result = api.download_all(str(tmpdir))
    assert len(result) == len(filenames)
    for path, product_info in result.items():
        pypath = py.path.local(path)
        assert pypath.purebasename in filenames
        assert pypath.check(exists=1, file=1)
        assert pypath.size() == product_info["size"]

    # Force one download to fail
    path, product_info = list(result.items())[0]
    py.path.local(path).remove()
    with requests_mock.mock(real_http=True) as rqst:
        url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('%s')/?$format=json" % product_info[
            "id"]
        json = api.session.get(url).json()
        json["d"]["Checksum"]["Value"] = "00000000000000000000000000000000"
        rqst.get(url, json=json)
        result = api.download_all(str(tmpdir), max_attempts=1, checksum=True)
        assert len(result) == len(filenames)
        assert result[path] is None
Example #3
def test_download_all(tmpdir):
    api = SentinelAPI(**_api_auth)
    # From https://scihub.copernicus.eu/apihub/odata/v1/Products?$top=5&$orderby=ContentLength
    filenames = ["S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E",
                 "S1A_WV_OCN__2SSV_20150526T211029_20150526T211737_006097_007E78_134A",
                 "S1A_WV_OCN__2SSV_20150526T081641_20150526T082418_006090_007E3E_104C"]

    api.load_query(" OR ".join(filenames))
    assert len(api.get_products()) == len(filenames)

    # Download normally
    result = api.download_all(str(tmpdir))
    assert len(result) == len(filenames)
    for path, product_info in result.items():
        pypath = py.path.local(path)
        assert pypath.purebasename in filenames
        assert pypath.check(exists=1, file=1)
        assert pypath.size() == product_info["size"]

    # Force one download to fail
    path, product_info = list(result.items())[0]
    py.path.local(path).remove()
    with requests_mock.mock(real_http=True) as rqst:
        url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('%s')/?$format=json" % product_info["id"]
        json = api.session.get(url).json()
        json["d"]["Checksum"]["Value"] = "00000000000000000000000000000000"
        rqst.get(url, json=json)
        result = api.download_all(str(tmpdir), max_attempts=1, checksum=True)
        assert len(result) == len(filenames)
        assert result[path] is None
Example #4
from datetime import datetime, timedelta

from sentinelsat import SentinelAPI, geojson_to_wkt, read_geojson


def sentinel1_tile_download(file, username, password, tilepath):
    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')

    td = timedelta(days=60)
    endDate = datetime.now()
    startDate = endDate - td

    footprint = geojson_to_wkt(read_geojson(file))
    print(footprint)
    #products = api.query(footprint,
    #                     date=(startDate, endDate),platformname='Sentinel-1')
    products = api.query(footprint,
                         producttype='SLC',
                         orbitdirection='ASCENDING')
    # download all results from the search
    api.download_all(products, directory_path=tilepath)
    return
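
Note that startDate and endDate are computed but unused, because the active query omits the date filter (the dated variant is left commented out). A hypothetical call, with every value a placeholder:

# Hypothetical usage; the GeoJSON path and credentials are placeholders.
sentinel1_tile_download('tile.geojson', 'my_user', 'my_pass', '/data/s1_tiles')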
Example #5
def download_scenes(period):
    date_from = period.date_from
    date_to = period.date_to

    # Check if result has already been done
    scene_filename = 's1_{dfrom}_{dto}.tif'.format(
        dfrom=period.date_from.strftime('%Y%m'),
        dto=period.date_to.strftime('%Y%m'))
    scene_path = os.path.join(RESULTS_PATH, scene_filename)
    if os.path.exists(scene_path):
        print(
            "Sentinel-1 mosaic for period {}-{} already done:".format(
                date_from, date_to), scene_path)
        return

    # Prepare API client for download
    api = SentinelAPI(settings.SCIHUB_USER, settings.SCIHUB_PASS,
                      settings.SCIHUB_URL)

    # Query scenes
    footprint = geojson_to_wkt(read_geojson(AOI_PATH))
    products = api.query(footprint,
                         date=(date_from, date_to),
                         platformname='Sentinel-1',
                         producttype='GRD',
                         polarisationmode='VV VH',
                         orbitdirection='ASCENDING')

    for k, p in products.items():
        print((k, p['summary']))

    os.makedirs(S1_RAW_PATH, exist_ok=True)

    # Filter already downloaded products
    products_to_download = {
        k: v
        for k, v in products.items() if not os.path.exists(
            os.path.join(S1_RAW_PATH, '{}.zip'.format(v['title'])))
    }

    # Download products
    results = api.download_all(products_to_download,
                               directory_path=S1_RAW_PATH)
    products = list(products.values())

    # Process the images of each product
    with mp.Pool(settings.S1_PROC_NUM_JOBS) as pool:
        pool.map(process_product, products)

    # Create a median composite from all images of each band, generate extra
    # bands and concatenate results into a single multiband image.
    superimpose(products)
    median(products, period)
    generate_vvvh(period)
    concatenate_results(period)
    clip_result(period)

    clean_temp_files(period)
Example #6
def s5down(datum=str(dt.date.today()), product_type='no2'):
    # this function will download s5p data of given product_type for given date
    # e.g. s5down('2019-08-15','no2')
    if product_type == 'no2':
        strtype = 'L2__NO2___'
    elif product_type == 'aerosols':
        strtype = 'L2__AER_AI'
    else:
        # previously strtype stayed undefined here and failed later with a NameError
        raise ValueError('unsupported product_type: ' + product_type)
    datum = dt.datetime.strptime(datum, '%Y-%m-%d').date()
    time_in = dt.datetime.combine(datum, dt.time(0, 0))
    time_out = dt.datetime.combine(datum, dt.time(23, 59))
    api = SentinelAPI('s5pguest', 's5pguest',
                      'https://s5phub.copernicus.eu/dhus')
    #coordinates for CZ:
    footprint = 'POLYGON((12.278971773041526 48.69059060056844,18.98957262575027 48.69059060056844,18.98957262575027 51.081759060281655,12.278971773041526 51.081759060281655,12.278971773041526 48.69059060056844))'
    products = api.query(footprint,
                         date=(time_in, time_out),
                         platformname='Sentinel-5',
                         producttype=strtype)
    print('there are ' + str(len(products)) + ' products found')
    a = api.download_all(products)
    geotiffs = []
    for product_ID in products:
        product_path = a[0][product_ID]['path']
        print('converting ' + product_path + ' to geotiff')
        geotiffs.append(s5p2geotiff(product_path, product_type))
    if not geotiffs:
        print('some error happened, no geotiffs generated')
        clean_downloaded(products, a)
        return None
    tifstring = ' '.join(geotiffs)
    print('merging geotiffs to ' + str(datum) +
          '.tif and cropping for CZ extents')
    outfile = str(datum) + '.' + product_type + '.tif'
    tmpfile = 'tmp.tif'
    os.system(
        'gdal_merge.py -o merged.tif -of GTiff -ul_lr 11.3867 51.4847 19.943 47.7933 -a_nodata 9999 '
        + tifstring)
    if product_type == 'no2':
        #need to compute 1000x
        gdal_calc = 'gdal_calc.py -A merged.tif --outfile=' + tmpfile + ' --calc="(A*1000 > 0)*(A * 1000 < 0.7)*(A * 1000)" --overwrite'
        print(gdal_calc)
        os.system(gdal_calc)
    else:
        tmpfile = 'merged.tif'
    #now oversample using cubic..
    gdalwarp = 'gdalwarp -tr 0.015 0.015 -r cubicspline -dstnodata 9999 -srcnodata 9999 ' + tmpfile + ' ' + outfile
    #gdalwarp = 'gdalwarp -s_srs EPSG:4326 -t_srs EPSG:4326 -tr 0.015 0.015 -r cubicspline -dstnodata 9999 -srcnodata 9999 temp1000.tif '+outfile
    print(gdalwarp)
    os.system(gdalwarp)
    print('(the file will be also saved as {}.tif)'.format(product_type))
    copyfile(outfile, '../data/' + product_type + '.tif')
    #cleaning
    clean_downloaded(products, a)
    return geotiffs
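
The pipeline above shells out to GDAL via os.system() with string concatenation. A sketch of the same gdal_merge step with subprocess.run, which avoids a shell and raises on a non-zero exit status (assuming, as the original does, that gdal_merge.py is on the PATH; the geotiffs list below is a placeholder):

import subprocess

geotiffs = ['a.tif', 'b.tif']  # placeholder list of per-product GeoTIFFs
# Equivalent of the os.system() gdal_merge.py call above, without a shell.
cmd = ['gdal_merge.py', '-o', 'merged.tif', '-of', 'GTiff',
       '-ul_lr', '11.3867', '51.4847', '19.943', '47.7933',
       '-a_nodata', '9999'] + geotiffs
subprocess.run(cmd, check=True)  # raises CalledProcessError on failure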
Example #7
def search(
        user, password, tile, geojson, start, end, download, md5,
        sentinel1, sentinel2, cloud, footprints, path, query, url):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """
    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if cloud:
        search_kwargs.update(
            {"platformname": "Sentinel-2",
             "cloudcoverpercentage": "[0 TO %s]" % cloud})
    elif sentinel2:
        search_kwargs.update({"platformname": "Sentinel-2"})
    elif sentinel1:
        search_kwargs.update({"platformname": "Sentinel-1"})
        
    if query is not None:
        search_kwargs.update(dict([i.split('=') for i in query.split(',')]))

    if tile:
        api.query(point=get_coordinates(tile=tile), initial_date=start, end_date=end, **search_kwargs)
    elif geojson:
        api.query(area=get_coordinates(geojson_file=geojson), initial_date=start, end_date=end, **search_kwargs)
    else:
        raise ValueError("Either a --geojson or a --tile argument must be given.")
    
    if footprints is True:
        footprints_geojson = api.get_footprints()
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        result = api.download_all(path, checksum=md5)
        if md5 is True:
            corrupt_scenes = [(path, info["id"]) for path, info in result.items() if info is not None]
            if len(corrupt_scenes) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for corrupt_tuple in corrupt_scenes:
                        outfile.write("%s : %s\n" % corrupt_tuple)
    else:
        for product in api.get_products():
            print('Product %s - %s' % (product['id'], product['summary']))
        print('---')
        print(
            '%s scenes found with a total size of %.2f GB' %
            (len(api.get_products()), api.get_products_size()))
Example #8
def download_sentinel(platform, prod_type, scihub_user, scihub_pass, start_date, end_date, region=None, filename=None, down_dir=None):
    from sentinelsat.sentinel import SentinelAPI
    import os
    # change the working directory to the location of files
    if down_dir is not None:
        os.chdir(down_dir)
    print(region)
    # connect to the API
    api = SentinelAPI(scihub_user, scihub_pass, 'https://scihub.copernicus.eu/dhus')
    
    # search by polygon, time, and Hub query keywords
    if region is not None and filename is not None:
        products = api.query(region, date=(start_date, end_date), filename=filename, producttype=prod_type, platformname=platform)
    elif region is not None:
        products = api.query(region, date=(start_date, end_date), producttype=prod_type, platformname=platform)
    elif filename is not None:
        products = api.query(date=(start_date, end_date), filename=filename, producttype=prod_type, platformname=platform)
    else:
        products = api.query(date=(start_date, end_date), producttype=prod_type, platformname=platform)
    
    # download all results from the search
    print("Files will be downloaded to {}".format(os.getcwd()))
    api.download_all(products)
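
For reference, a hedged sketch of how this function might be invoked; every value is a placeholder. Since region is forwarded as the first positional argument of api.query(), a WKT footprint string is the natural fit:

# Hypothetical usage; all argument values are placeholders.
from datetime import date

download_sentinel(platform='Sentinel-2',
                  prod_type='S2MSI2A',
                  scihub_user='my_user',
                  scihub_pass='my_pass',
                  start_date=date(2020, 1, 1),
                  end_date=date(2020, 1, 31),
                  region=None,  # or a WKT footprint string
                  down_dir='/tmp/sentinel')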
    
Example #9
def search(user, password, geojson, start, end, download, footprints, path, query):
    """Search for Sentinel-1 products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """
    api = SentinelAPI(user, password)
    if query is not None:
        query = dict([i.split('=') for i in query.split(',')])
        api.query(get_coordinates(geojson), start, end, **query)
    else:
        api.query(get_coordinates(geojson), start, end)

    if footprints is True:
        footprints_geojson = api.get_footprints()
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        api.download_all(path)
    else:
        for product in api.get_products():
            print('Product %s - %s' % (product['id'], product['summary']))
Example #10
def search(
        user, password, geojson, start, end, download, md5,
        sentinel1, sentinel2, cloud, footprints, path, query, url):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """
    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if cloud:
        search_kwargs.update(
            {"platformname": "Sentinel-2",
             "cloudcoverpercentage": "[0 TO %s]" % cloud})
    elif sentinel2:
        search_kwargs.update({"platformname": "Sentinel-2"})
    elif sentinel1:
        search_kwargs.update({"platformname": "Sentinel-1"})

    if query is not None:
        search_kwargs.update(dict([i.split('=') for i in query.split(',')]))

    api.query(get_coordinates(geojson), start, end, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.get_footprints()
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        result = api.download_all(path, checksum=md5)
        if md5 is True:
            corrupt_scenes = [(path, info["id"]) for path, info in result.items() if info is not None]
            if len(corrupt_scenes) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for corrupt_tuple in corrupt_scenes:
                        outfile.write("%s : %s\n" % corrupt_tuple)
    else:
        for product in api.get_products():
            print('Product %s - %s' % (product['id'], product['summary']))
        print('---')
        print(
            '%s scenes found with a total size of %.2f GB' %
            (len(api.get_products()), api.get_products_size()))
Example #11
args = parser.parse_args()

print("loading id...")
data = json.load(open(args.data))

startdate = date(data["startdate"][0], data["startdate"][1], data["startdate"][2])
enddate = date(data["enddate"][0], data["enddate"][1], data["enddate"][2])


print("connecting to sentinel API...")
api = SentinelAPI(data["login"], data["password"], 'https://scihub.copernicus.eu/dhus')

# search by polygon, time, and SciHub query keywords
print("searching...")
footprint = geojson_to_wkt(read_geojson(args.geojson))
if args.sentinel == 1:
    products = api.query(footprint,
                         date=(startdate, enddate),
                         platformname='Sentinel-1',
                         producttype='GRD')
elif args.sentinel == 2:
    products = api.query(footprint,
                         date=(startdate, enddate),
                         platformname='Sentinel-2')
else:
    # previously products stayed undefined for any other value
    raise ValueError('--sentinel must be 1 or 2')
print("  product number: ", len(products))
# download all results from the search
print("downloading...")
api.download_all(products)
Example #12
def search(
        user, password, geojson, start, end, download, md5, sentinel, producttype,
        instrument, sentinel1, sentinel2, cloud, footprints, path, query, url):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs.update({"platformname": "Sentinel-" + sentinel})

    if instrument and not producttype:
        search_kwargs.update({"instrumentshortname": instrument})

    if producttype:
        search_kwargs.update({"producttype": producttype})

    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs.update({"cloudcoverpercentage": "[0 TO %s]" % cloud})

    # DEPRECATED: to be removed with next major release
    elif sentinel2:
        search_kwargs.update({"platformname": "Sentinel-2"})
        logger.info('DEPRECATED: Please use --sentinel instead')

    # DEPRECATED: to be removed with next major release
    elif sentinel1:
        search_kwargs.update({"platformname": "Sentinel-1"})
        logger.info('DEPRECATED: Please use --sentinel instead')

    if query is not None:
        search_kwargs.update(dict([i.split('=') for i in query.split(',')]))

    wkt = geojson_to_wkt(read_geojson(geojson))
    products = api.query(wkt, start, end, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path, checksum=md5)
        if md5 is True:
            if len(failed_downloads) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for failed_id in failed_downloads:
                        outfile.write("%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        for product_id, props in products.items():
            logger.info('Product %s - %s' % (product_id, props['summary']))
        logger.info('---')
        logger.info(
            '%s scenes found with a total size of %.2f GB' %
            (len(products), api.get_products_size(products)))
Example #13
def download_scenes(period):
    date_from = period.date_from
    date_to = period.date_to

    period_s = '{dfrom}_{dto}'.format(dfrom=period.date_from.strftime("%Y%m"),
                                      dto=period.date_to.strftime("%Y%m"))

    # Check if result has already been done
    scene_filename = f's2_{period_s}*.tif'
    scene_path = os.path.join(RESULTS_PATH, scene_filename)
    if len(glob(scene_path)) == 2:
        print(
            "Sentinel-2 mosaic for period {}-{} already done:".format(
                date_from, date_to), scene_path)
        return

    if not settings.SCIHUB_USER or not settings.SCIHUB_PASS:
        # raising a plain string is invalid in Python 3
        raise ValueError("SCIHUB_USER and/or SCIHUB_PASS are not set. "
                         "Please read the Configuration section on README.")

    api = SentinelAPI(settings.SCIHUB_USER, settings.SCIHUB_PASS,
                      settings.SCIHUB_URL)

    # Search by polygon, time, and Hub query keywords
    footprint = geojson_to_wkt(
        read_geojson(os.path.join(APPDIR, 'data', 'extent.geojson')))

    products = api.query(footprint,
                         date=(date_from, date_to),
                         platformname='Sentinel-2',
                         cloudcoverpercentage=(0, 20))

    # Skip L2A products
    l2 = []
    for p in products:
        if 'MSIL2A' in products[p]['title']:
            l2.append(p)
    for p in l2:
        products.pop(p)

    for p in products:
        print(products[p]['title'])

    # Filter already downloaded products
    l1c_path = os.path.join(S2_L1C_PATH, period_s)
    os.makedirs(l1c_path, exist_ok=True)
    products_to_download = {
        k: v
        for k, v in products.items() if
        not os.path.exists(os.path.join(l1c_path, '{}.zip'.format(v['title'])))
    }

    # Download products
    api.download_all(products_to_download, directory_path=l1c_path)

    products = list(products.values())

    # Unzip
    for p in products:
        unzip_product(p, period_s)

    # Get the list of L1C products still to be processed to L2A
    l2a_path = os.path.join(S2_L2A_PATH, period_s)
    os.makedirs(l2a_path, exist_ok=True)
    l1c_can_prods = get_canonical_names(glob(os.path.join(l1c_path, '*.SAFE')))
    l2a_can_prods = get_canonical_names(glob(os.path.join(l2a_path, '*.SAFE')))
    missing_l1c_prods = [
        l1c_can_prods[k]
        for k in set(l1c_can_prods.keys()) - set(l2a_can_prods.keys())
    ]

    # Run s2m preprocess (sen2cor) on raw directory
    for p in missing_l1c_prods:
        sen2_preprocess(p, period_s)

    # Build mosaic
    mosaic_path = os.path.join(settings.IMAGES_PATH, 'mosaic', period_s)
    os.makedirs(mosaic_path, exist_ok=True)

    xmin, ymin, xmax, ymax = [
        260572.3994411753083114, 8620358.0515629947185516,
        324439.4877797830849886, 8720597.2414500378072262
    ]
    mosaic_name = 's2_{}{}_{}{}_mosaic'.format(date_from.year, date_from.month,
                                               date_to.year, date_to.month)

    for res in [10, 20]:
        cmd = "python3 {}/mosaic.py -te {} {} {} {} -e 32718 -res {} -n {} -v -o {} {}".format(
            settings.S2M_CLI_PATH, xmin, ymin, xmax, ymax, res, mosaic_name,
            mosaic_path, l2a_path)
        rv = os.system(cmd)
        if rv != 0:
            raise ValueError('s2m mosaic failed')

    generate_vegetation_indexes(mosaic_name, period_s)
    concatenate_results(mosaic_name, period_s)
    clip_results(period_s)

    clean_temp_files(period_s)
Example #14
def cli(
    user,
    password,
    geometry,
    start,
    end,
    uuid,
    name,
    download,
    sentinel,
    producttype,
    instrument,
    cloud,
    footprints,
    path,
    query,
    url,
    order_by,
    limit,
):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your Copernicus Open Access Hub user and password, you must pass a geojson file
    containing the geometry of the area you want to search for or the UUIDs of the products. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    _set_logger_handler()

    if user is None or password is None:
        try:
            user, password = requests.utils.get_netrc_auth(url)
        except TypeError:
            pass

    if user is None or password is None:
        raise click.UsageError(
            "Missing --user and --password. Please see docs "
            "for environment variables and .netrc support.")

    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel

    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument

    if producttype:
        search_kwargs["producttype"] = producttype

    if cloud:
        if sentinel not in ["2", "3"]:
            logger.error("Cloud cover is only supported for Sentinel 2 and 3.")
            exit(1)
        search_kwargs["cloudcoverpercentage"] = (0, cloud)

    if query is not None:
        search_kwargs.update((x.split("=") for x in query))

    if geometry is not None:
        search_kwargs["area"] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        uuid_list = [x.strip() for x in uuid]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if "Invalid key" in e.msg:
                    logger.error("No product with ID '%s' exists on server",
                                 productid)
                    exit(1)
                else:
                    raise
    elif name is not None:
        search_kwargs["identifier"] = name[0] if len(
            name) == 1 else "(" + " OR ".join(name) + ")"
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by,
                             limit=limit,
                             **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        if os.path.isdir(footprints):
            foot_path = os.path.join(footprints, "search_footprints.geojson")
        else:
            foot_path = "search_footprints.geojson"
        if path == ".":
            dump_path = os.path.join(os.getcwd(), foot_path)
        else:
            dump_path = os.path.join(path, foot_path)
        with open(dump_path, "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, triggered, failed_downloads = api.download_all(
            products, path)
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"),
                      "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write("%s : %s\n" %
                                  (failed_id, products[failed_id]["title"]))
    else:
        for product_id, props in products.items():
            if uuid is None:
                logger.info("Product %s - %s", product_id, props["summary"])
            else:  # querying uuids has no summary key
                logger.info(
                    "Product %s - %s - %s MB",
                    product_id,
                    props["title"],
                    round(int(props["size"]) / (1024.0 * 1024.0), 2),
                )
        if uuid is None:
            logger.info("---")
            logger.info(
                "%s scenes found with a total size of %.2f GB",
                len(products),
                api.get_products_size(products),
            )
Example #15
def download(product, credentials):
    api = SentinelAPI(credentials[0], credentials[1], 'https://scihub.copernicus.eu/dhus')
    api.download_all(product)
    for elem in product:
        os.system('unzip ' + product[elem]['title'] + '.zip')
        os.remove(product[elem]['title'] + '.zip')
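
The loop above shells out to the unzip binary and then deletes each archive. A standard-library sketch with zipfile that is assumed to have the same effect (extract into the current directory, then remove the zip); the function name is hypothetical:

import os
import zipfile

def unzip_products(product):
    # Same effect as the os.system('unzip ...') loop above, without a shell.
    for elem in product:
        archive = product[elem]['title'] + '.zip'
        with zipfile.ZipFile(archive, 'r') as zf:
            zf.extractall()
        os.remove(archive)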
Example #16
def search(user, password, geojson, start, end, download, md5, sentinel,
           producttype, instrument, sentinel1, sentinel2, cloud, footprints,
           path, query, url):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs.update({"platformname": "Sentinel-" + sentinel})

    if instrument and not producttype:
        search_kwargs.update({"instrumentshortname": instrument})

    if producttype:
        search_kwargs.update({"producttype": producttype})

    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError(
                'Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs.update({"cloudcoverpercentage": "[0 TO %s]" % cloud})

    # DEPRECATED: to be removed with next major release
    elif sentinel2:
        search_kwargs.update({"platformname": "Sentinel-2"})
        logger.info('DEPRECATED: Please use --sentinel instead')

    # DEPRECATED: to be removed with next major release
    elif sentinel1:
        search_kwargs.update({"platformname": "Sentinel-1"})
        logger.info('DEPRECATED: Please use --sentinel instead')

    if query is not None:
        search_kwargs.update(dict([i.split('=') for i in query.split(',')]))

    wkt = geojson_to_wkt(read_geojson(geojson))
    products = api.query(wkt, start, end, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"),
                  "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products,
                                                           path,
                                                           checksum=md5)
        if md5 is True:
            if len(failed_downloads) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"),
                          "w") as outfile:
                    for failed_id in failed_downloads:
                        outfile.write(
                            "%s : %s\n" %
                            (failed_id, products[failed_id]['title']))
    else:
        for product_id, props in products.items():
            logger.info('Product %s - %s' % (product_id, props['summary']))
        logger.info('---')
        logger.info('%s scenes found with a total size of %.2f GB' %
                    (len(products), api.get_products_size(products)))
Example #17
def sen2_download(products, conf):
    # TODO: Specify download location, file resuming.
    api = SentinelAPI(conf["sen2"]["user"], conf["sen2"]["pass"],
                      'https://scihub.copernicus.eu/dhus')
    api.download_all(products, conf["data"]["out_folder"])
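
The conf mapping is not shown in this example; judging from the keys accessed above, it presumably has a shape like the following (all values are placeholders):

# Hypothetical shape of the conf dictionary consumed by sen2_download().
conf = {
    'sen2': {'user': 'my_user', 'pass': 'my_pass'},
    'data': {'out_folder': '/data/sen2'},
}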
Example #18
class S1Download(object):
    """
    This module makes searching, downloading and retrieving metadata of Sentinel-1 satellite images from the
    Copernicus Open Access Hub easy.

    Parameters
    ----------
    username : str
        Username for Copernicus Open Access Hub
    password : str
        Password for Copernicus Open Access Hub
    region : str
        A geojson file.
    timestart : str
        Start time like "YYYY-MM-DD"
    timeend : str
        End time like "YYYY-MM-DD".
    outdir : str
        Output directory.
    producttype : {'SLC', 'GRD', 'OCN', 'RAW'}
        Product type. If None, all types will be recognized.
    polarisationmode : tuple or str
        A combination of V and H like ('VH', 'HV') or simply 'VH'.
    sensoroperationalmode : {'SM', 'IW', 'EW', 'WV'}
        Sensor operational mode. If None, all types will be recognized.
    orbitnumber : int
        Orbit number.
    orbitdirection : {'DESCENDING', 'ASCENDING'}
        Orbit direction. If None, all types will be recognized.

    Attributes
    ----------
    api : object
        Sentinelsat API object.
    outdir : str
    region : str
        The region GeoJSON converted to a WKT string.
    kwargs : dict
        Dictionary with the attributes that were set.
    files : DataFrame
        Pandas DataFrame with detected files.

    Methods
    -------
    download()
        Download all files.
    print_products()
        Print all detected files.

    Examples
    --------
    The general usage is
    ::
        $ ds1.download [-p] username=string password=string region=string timestart=string timeend=string outdir=string
        [*attributes=string] [--verbose] [--quiet]

    For *attributes the following parameters can be used
    ::
        >>> ["producttype", "polarisationmode", "sensoroperationalmode", "orbitnumber", "orbitdirection"]

    Print all Sentinel-1 data with product type SLC between 2015-01-02 and 2015-01-12::
        $ ds1.download -p username=USER password=PASSWORD region=myGEoJsOnFile.geojson timestart=2015-01-02
        timeend=2015-01-12 outdir='home/usr/data' producttype=SLC

    Download the last query
    ::
        $ ds1.download username=USER password=PASSWORD region=myGEoJsOnFile.geojson timestart=2015-01-02
        timeend=2015-01-12 outdir='home/usr/data' producttype=SLC

    Notes
    -----
    **Flags:**
        * p : Print the detected files and exit.
    """
    def __init__(self,
                 username,
                 password,
                 region,
                 timestart,
                 timeend,
                 outdir,
                 producttype=None,
                 polarisationmode=None,
                 sensoroperationalmode=None,
                 orbitnumber=None,
                 orbitdirection=None):

        # Initialise Sentinel Python API ------------------------------------------------------------------------------
        self.api = SentinelAPI(username, password,
                               'https://scihub.copernicus.eu/dhus')

        # Initialise Directory -----------------------------------------------------------------------------------------
        if not os.path.exists(outdir):
            os.makedirs(outdir)
        # previously self.outdir was only set when the directory already existed
        self.outdir = outdir

        # Initialise Mandatory Parameter ------------------------------------------------------------------------------
        self.region = geojson_to_wkt(read_geojson(region))

        # < Reformat Time > ------------
        timestart_temp = timestart.replace('-', '')
        timeend_temp = timeend.replace('-', '')

        self.date = (timestart_temp, timeend_temp)

        # Initialize Attributes ----------------------------------------------------------------------------------------
        input_parameter = [
            producttype, polarisationmode, sensoroperationalmode, orbitnumber,
            orbitdirection
        ]
        __KEYS__ = [
            "producttype", "polarisationmode", "sensoroperationalmode",
            "orbitnumber", "orbitdirection"
        ]

        self.kwargs = {}

        for i, item in enumerate(input_parameter):
            if item is not None:
                self.kwargs[__KEYS__[i]] = item

        # Initialise Sentinel Products with API ------------------------------------------------------------------------
        self.products = self.api.query(self.region,
                                       date=self.date,
                                       platformname='Sentinel-1',
                                       **self.kwargs)

        self.files = self.api.to_dataframe(self.products)

    def download(self):
        self.api.download_all(self.products, directory_path=self.outdir)
        return 0

    def print_products(self):
        """
        Print all detected files.

        Returns
        -------
        None
        """

        df = self.files.to_string()
        sys.stdout.write(df)
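
Translating the shell-oriented usage in the docstring into plain Python, a call might look like this sketch (every argument value is a placeholder):

# Hypothetical usage of S1Download; all values are placeholders.
dl = S1Download(username='my_user',
                password='my_pass',
                region='aoi.geojson',
                timestart='2015-01-02',
                timeend='2015-01-12',
                outdir='/home/usr/data',
                producttype='SLC')
dl.print_products()  # inspect the detected scenes first
dl.download()        # then fetch everything into outdir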
Example #19
def cli(
    user,
    password,
    geometry,
    start,
    end,
    uuid,
    name,
    download,
    quicklook,
    sentinel,
    producttype,
    instrument,
    cloud,
    footprints,
    path,
    query,
    url,
    order_by,
    location,
    limit,
    info,
):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your Copernicus Open Access Hub user and password, you must pass a geojson file
    containing the geometry of the area you want to search for or the UUIDs of the products. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    _set_logger_handler()

    if user is None or password is None:
        try:
            user, password = requests.utils.get_netrc_auth(url)
        except TypeError:
            pass

    if user is None or password is None:
        raise click.UsageError(
            "Missing --user and --password. Please see docs "
            "for environment variables and .netrc support.")

    api = SentinelAPI(user, password, url)

    if info:
        ctx = click.get_current_context()
        click.echo("DHuS version: " + api.dhus_version)
        ctx.exit()

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel

    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument

    if producttype:
        search_kwargs["producttype"] = producttype

    if cloud:
        if sentinel not in ["2", "3"]:
            logger.error("Cloud cover is only supported for Sentinel 2 and 3.")
            exit(1)
        search_kwargs["cloudcoverpercentage"] = (0, cloud)

    if query is not None:
        search_kwargs.update((x.split("=") for x in query))

    if location is not None:
        wkt, info = placename_to_wkt(location)
        minX, minY, maxX, maxY = info["bbox"]
        r = 6371  # average radius, km
        extent_east = r * math.radians(maxX - minX) * math.cos(
            math.radians((minY + maxY) / 2))
        extent_north = r * math.radians(maxY - minY)
        logger.info(
            "Querying location: '%s' with %.1f x %.1f km, %f, %f to %f, %f bounding box",
            info["display_name"],
            extent_north,
            extent_east,
            minY,
            minX,
            maxY,
            maxX,
        )
        search_kwargs["area"] = wkt

    if geometry is not None:
        # check if the value is an existing path
        if os.path.exists(geometry):
            search_kwargs["area"] = geojson_to_wkt(read_geojson(geometry))
        # check if the value is a GeoJSON
        else:
            if geometry.startswith("{"):
                try:
                    geometry = json.loads(geometry)
                    search_kwargs["area"] = geojson_to_wkt(geometry)
                except json_parse_exception:
                    raise click.UsageError(
                        "geometry string starts with '{' but is not a valid GeoJSON."
                    )
            # check if the value is a WKT
            elif is_wkt(geometry):
                search_kwargs["area"] = geometry
            else:
                raise click.UsageError(
                    "The geometry input is neither a GeoJSON file with a valid path, "
                    "a GeoJSON String nor a WKT string.")

    if uuid is not None:
        uuid_list = [x.strip() for x in uuid]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except InvalidKeyError:
                logger.error("No product with ID '%s' exists on server",
                             productid)
                exit(1)
    elif name is not None:
        search_kwargs["identifier"] = name[0] if len(
            name) == 1 else "(" + " OR ".join(name) + ")"
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by,
                             limit=limit,
                             **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"),
                  "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if quicklook:
        downloaded_quicklooks, failed_quicklooks = api.download_all_quicklooks(
            products, path)
        if failed_quicklooks:
            api.logger.warning("Some quicklooks failed: %s out of %s",
                               len(failed_quicklooks), len(products))

    if download is True:
        product_infos, triggered, failed_downloads = api.download_all(
            products, path)
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"),
                      "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write("%s : %s\n" %
                                  (failed_id, products[failed_id]["title"]))
    else:
        for product_id, props in products.items():
            if uuid is None:
                logger.info("Product %s - %s", product_id, props["summary"])
            else:  # querying uuids has no summary key
                logger.info(
                    "Product %s - %s - %s MB",
                    product_id,
                    props["title"],
                    round(int(props["size"]) / (1024.0 * 1024.0), 2),
                )
        if uuid is None:
            logger.info("---")
            logger.info(
                "%s scenes found with a total size of %.2f GB",
                len(products),
                api.get_products_size(products),
            )
Example #20
class Processor():
    def __init__(self,
                 sentinel_user,
                 sentinel_pass,
                 start_date,
                 end_date,
                 dl_dir,
                 input_file,
                 debug=False):
        self.SENTINEL_USER = sentinel_user
        self.SENTINEL_PASS = sentinel_pass
        self.DL_DIR = dl_dir
        self.INPUT_FILE = input_file
        self.START_DATE = start_date
        self.END_DATE = end_date
        self.DEBUG = debug

        if not os.path.exists(self.DL_DIR):
            os.mkdir(self.DL_DIR)

    def phase_1(self):
        self.api = SentinelAPI(self.SENTINEL_USER, self.SENTINEL_PASS)
        self.aoi_footprint = geojson_to_wkt(read_geojson(self.INPUT_FILE))

    def phase_2(self):
        self.api_products = self.api.query(
            self.aoi_footprint,
            date=(self.START_DATE, self.END_DATE),
            area_relation='Intersects',
            platformname='Sentinel-2',
            cloudcoverpercentage=(0, 30),
        )

    def phase_3(self):
        """
        We're doing the conversion from a GeoDataFrame to a list of dictionaries.
        
        After the conversion we intend to use the "footprint" and the "index" columns.

        This step is required because there are multiple products with the same footprint
        and later on we need the index in order to download the images from SentinelAPI.
        """

        self.product_df = self.api.to_dataframe(self.api_products)

        if len(self.product_df.index) == 0:
            raise Exception("No images for selected period")

        self.product_df = self.product_df.sort_values(
            ['cloudcoverpercentage', 'ingestiondate'], ascending=[True, True])
        self.tile_footprints = []
        for x in self.product_df[[
                "size", "tileid", "processinglevel", "footprint"
        ]].T.to_dict().items():
            self.tile_footprints.append({**x[1], "index": x[0]})

        if self.DEBUG:
            pprint(self.tile_footprints[:3])

    def phase_4(self):
        L1 = min_cover_1(self.tile_footprints)
        if self.DEBUG:
            print("{} tiles after the 1st reduction".format(len(L1)))
        L2 = min_cover_2(L1)
        if self.DEBUG:
            print("{} tiles after the 2nd reduction".format(len(L2)))
        self.reduced_footprints = L2

    def phase_5(self):
        dl_indexes = [x["index"] for x in self.reduced_footprints]
        self.api.download_all(dl_indexes, directory_path=self.DL_DIR)

        if self.DEBUG:
            pprint(dl_indexes)

    def phase_6(self):
        """
        We're decompressing the archives unless they're already decompressed.
        """
        for p in pathlib.Path(self.DL_DIR).iterdir():
            p_dir = re.sub(r'\.zip$', '.SAFE', str(p))
            if os.path.isfile(p) and not os.path.exists(p_dir):
                extract_path = os.path.dirname(p)
                print("Unzipping " + str(p))
                with zipfile.ZipFile(p, 'r') as zip_ref:
                    zip_ref.extractall(extract_path)

    def phase_7(self):
        """
        Converting the .jp2 images to .tiff
        """
        def select_files(path, pattern):
            L = []
            for root, dirs, files in os.walk(path):
                if len(dirs) == 0:
                    for f in files:
                        if re.match(pattern, f):
                            L.append(os.path.join(root, f))
            return L

        def convert_to_tiff(paths):
            tiff_paths = []
            for p in paths:
                print("Converting " + p)
                with rasterio.open(p, mode="r") as src:
                    profile = src.meta.copy()
                    profile.update(driver="GTiff")

                    outfile = re.sub(".jp2", ".tiff", p)
                    with rasterio.open(outfile, 'w', **profile) as dst:
                        dst.write(src.read())
                        tiff_paths.append(outfile)
            return tiff_paths

        self.jp2_paths = select_files(self.DL_DIR, ".*_TCI.jp2$")
        self.tiff_paths = convert_to_tiff(self.jp2_paths)

    def phase_8(self):
        """
        We're mergin the raster images.
        """

        raster_list = [
            rasterio.open(f, mode='r', driver="GTiff") for f in self.tiff_paths
        ]
        merged_data, out_trans = rasterio.merge.merge(raster_list)

        if self.DEBUG:
            fig, ax = plt.subplots(figsize=(14, 14))
            show(merged_data, cmap='terrain', ax=ax)

        merged_meta = raster_list[0].meta.copy()
        merged_meta.update({
            "driver": "GTiff",
            "height": merged_data.shape[1],
            "width": merged_data.shape[2],
            "transform": out_trans,
            "crs": raster_list[0].crs,
            "count": 3,
        })
        if self.DEBUG:
            for x in [x.meta for x in raster_list] + [merged_meta]:
                pprint(x)

        self.MERGED_RAW = os.path.join(self.DL_DIR, "merged1.tiff")
        with rasterio.open(self.MERGED_RAW, mode="w", **merged_meta) as dest:
            dest.write(merged_data)

    def phase_9(self):
        """
        Reprojecting the images to  EPSG:4326
        """

        dst_crs = 'EPSG:4326'

        with rasterio.open(self.MERGED_RAW) as src:
            transform, width, height = calculate_default_transform(
                src.crs, dst_crs, src.width, src.height, *src.bounds)
            kwargs = src.meta.copy()
            kwargs.update({
                'crs': dst_crs,
                'transform': transform,
                'width': width,
                'height': height
            })
            self.MERGED_4326 = os.path.join(self.DL_DIR, "merged1_4326.tiff")
            with rasterio.open(self.MERGED_4326, mode="w", **kwargs) as dst:
                for i in range(1, src.count + 1):
                    reproject(source=rasterio.band(src, i),
                              destination=rasterio.band(dst, i),
                              src_transform=src.transform,
                              src_crs=src.crs,
                              dst_transform=transform,
                              dst_crs=dst_crs,
                              resampling=Resampling.nearest)

    def phase_10(self):
        """
        We're clipping the area of interest.
        """

        with rasterio.open(self.MERGED_4326) as src:
            out_image, out_transform = rasterio.mask.mask(
                src, [shapely.wkt.loads(self.aoi_footprint)], crop=True)
            out_meta = src.meta
            out_meta.update({
                "driver": "GTiff",
                "height": out_image.shape[1],
                "width": out_image.shape[2],
                "transform": out_transform,
            })

            self.MERGED_REGION = os.path.join(self.DL_DIR,
                                              "merged1_region.tiff")
            with rasterio.open(self.MERGED_REGION, "w", **out_meta) as dest:
                dest.write(out_image)

                if self.DEBUG:
                    import matplotlib.pyplot as plt
                    fig, ax = plt.subplots(figsize=(14, 14))
                    from rasterio.plot import show
                    show(out_image, cmap='terrain', ax=ax)

    def reset(self):
        """
        We're resetting object state to allow for a subsequent run.
        """
        self.aoi_footprint = None
        self.api_products = None
        self.product_df = None
        self.tile_footprints = None
        self.reduced_footprints = None
        self.jp2_paths = None
        self.tiff_paths = None
        self.MERGED_RAW = None
        self.MERGED_4326 = None
        self.MERGED_REGION = None
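
The class defines the pipeline phases but no driver, so the caller is expected to run them in order. A sketch (all argument values are placeholders; the date strings are assumed to be in a format api.query() accepts, e.g. 'YYYYMMDD'):

# Hypothetical driver for the Processor pipeline; values are placeholders.
proc = Processor(sentinel_user='my_user',
                 sentinel_pass='my_pass',
                 start_date='20210601',
                 end_date='20210630',
                 dl_dir='/data/s2',
                 input_file='aoi.geojson',
                 debug=True)
for phase in (proc.phase_1, proc.phase_2, proc.phase_3, proc.phase_4,
              proc.phase_5, proc.phase_6, proc.phase_7, proc.phase_8,
              proc.phase_9, proc.phase_10):
    phase()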
Example #21
                     end_date='20170730',
                     platformname='Sentinel-1',
                     producttype='SLC')
print(api._last_query)
print('%s product results for your query. The products need %s Gb disk space'
      % (len(products), api.get_products_size(products)))

### convert to Pandas DataFrame
products_df = api.to_dataframe(products)
print(products_df.index.values)

### download all query products
path = '/media/nas_data/Thomas/Wallerfing/Sentinel_1_data'
result = api.download_all(products,
                          directory_path=path,
                          max_attempts=10,
                          checksum=True,
                          check_existing=True,
                          show_progress=False)
print('Downloaded files:')
print(result.keys())  # note: dict.viewkeys() exists only in Python 2
"""
Change and/or sort query results
"""
# ### sort and limit to first 5 sorted products
# products_df_sorted = products_df.sort_values(['ingestiondate', 'producttype'], ascending=[True, True])
# products_df_sorted = products_df_sorted.head(1)

# ### download (sorted and reduced) products in order
# path = '/media/tweiss/Daten'
# result={}
# for product_id in products_df_sorted["id"]:
Example #22
# convert result to Pandas Dataframe
products_df = api.to_dataframe(products)

# create dataframe only for overview
df_overview = products_df[["title"]]
df_string = df_overview.to_string(index=False).split()[1:]
df_new = pd.DataFrame(df_string, columns=["Product"])

# make dates column
dates = df_new["Product"].str[11:19]
dt_series = pd.to_datetime(dates)
df_new["dates"] = dt_series
df_new.sort_values(by=["dates"], inplace=True)

# make tiles column
tiles = df_new["Product"].str[39:44]
df_new["tile"] = tiles
print(df_new)

# request confirmation for download of filtered scenes
proceed = input(
    'Do you want to proceed with the download of the selected scenes? [y/n] ')

if proceed == 'y':
    # path to folder where downloaded scenes will be stored defined by user on the command line
    download_folder = sys.argv[2]
    # actual download of filtered scenes
    api.download_all(products, directory_path=download_folder)
else:
    sys.exit()
Example #23
s1footprints.to_csv(outfile)

###############################################
# download the selected scenes
###############################################
# make a 'data' directory (if it does not exist yet) to where the images will be downloaded
datadir = wd + "data/"
if not os.path.exists(datadir):
    print("Creating directory: ", datadir)
    os.mkdir(datadir)

# change to the 'data' directory
os.chdir(datadir)

# download sorted and reduced products in order
api.download_all(products_df_n['uuid'])

# save the footprints of the scenes marked for download together with their
#   metadata in a Geojson file
# first, run a new query to get the metadata for the selected scenes
products_n = OrderedDict()
for uuid in products_df_n['uuid']:
    kw = query_kwargs.copy()
    kw['uuid'] = uuid
    pp = api.query(**kw)
    products_n.update(pp)

# then, write the footprints and metadata to a geojson file
os.chdir(wd)  # change to the working directory
outfile = 'footprints.geojson'
with open(outfile, 'w') as f:
    products = api.query(footprint,
                         date=(date(y, m, day), date(y, m, day + 1)),
                         producttype='S2MSI1C',
                         platformname='Sentinel-2')

    print(type(products))
    products_df = api.to_dataframe(products)
    cond1 = len(products_df.index) == 2
    cond2 = np.all(products_df.loc[:, 'relativeorbitnumber'].values
                   == np.asarray([ro, ro]))
    if cond1 and cond2:
        print('There are two scenes of relative orbit {} available:'.format(ro))
        print(products_df)
        print('Scenes will be downloaded...')
        api.download_all(products, directory_path=out_dir)
    else:
        print('For date ' + str(d) +
              ' conditions are not met. The following scenes are available:')
        print(products_df)
        print('Will continue with next date...')
        continue

# extract all downloaded data
l1cs = os.listdir(out_dir)
print(l1cs)
for s in l1cs:
    file = os.path.join(out_dir, s)
    with zipfile.ZipFile(file, 'r') as f:
        f.extractall(path=out_dir)
    print(s + ' was successfully extracted.')
Example #25
def download_and_build_composite(date_from, date_to):
    from eo_sensors.utils import clip, rescale_byte, unzip
    from sentinelsat.sentinel import SentinelAPI, geojson_to_wkt, read_geojson

    period_s = '{dfrom}_{dto}'.format(dfrom=date_from.strftime("%Y%m%d"),
                                      dto=date_to.strftime("%Y%m%d"))
    proc_scene_dir = os.path.join(PROC_DIR, period_s)
    tci_path = os.path.join(proc_scene_dir, 'tci.tif')

    if os.path.exists(tci_path):
        logger.info("TCI file already generated at %s", tci_path)
        return tci_path

    if not settings.SCIHUB_USER or not settings.SCIHUB_PASS:
        # raising a plain string is invalid in Python 3
        raise ValueError("SCIHUB_USER and/or SCIHUB_PASS are not set. "
                         "Please read the Configuration section on README.")

    api = SentinelAPI(settings.SCIHUB_USER, settings.SCIHUB_PASS,
                      settings.SCIHUB_URL)

    extent = read_geojson(EXTENT_PATH)
    footprint = geojson_to_wkt(extent)
    logger.info(
        "Query S2MSI2A products with up to %d%% cloud cover from %s to %s",
        MAX_CLOUD_PERC, date_from, date_to)
    products = api.query(footprint,
                         date=(date_from, date_to),
                         platformname='Sentinel-2',
                         cloudcoverpercentage=(0, MAX_CLOUD_PERC),
                         producttype='S2MSI2A')
    logger.info("Found %d products", len(products))

    raw_dir = os.path.join(RAW_DIR, period_s)
    os.makedirs(raw_dir, exist_ok=True)

    # Filter already downloaded products
    products_to_download = {
        k: v
        for k, v in products.items()
        if not (os.path.exists(
            os.path.join(raw_dir, '{}.zip'.format(v['title']))) or os.path.
                exists(os.path.join(raw_dir, '{}.SAFE'.format(v['title']))))
    }

    # Download products
    if products_to_download:
        logger.info("Download all products (%d)", len(products_to_download))
        api.download_all(products_to_download, directory_path=raw_dir)

    # Unzip compressed files, if there are any
    for p in glob(os.path.join(raw_dir, '*.zip')):
        name, _ = os.path.splitext(os.path.basename(p))
        p_dir = os.path.join(raw_dir, f'{name}.SAFE')
        if not os.path.exists(p_dir):
            logger.info("Unzip %s", p)
            unzip(p, delete_zip=False)

    # Build mosaic
    mosaic_dir = os.path.join(proc_scene_dir, 'mosaic')
    os.makedirs(mosaic_dir, exist_ok=True)
    # FIXME: Read bounds from EXTENT_UTM_PATH
    xmin, ymin, xmax, ymax = [
        261215.0000000000000000, 8620583.0000000000000000,
        323691.8790999995544553, 8719912.0846999995410442
    ]
    cmd = f"python3 {settings.S2M_CLI_PATH}/mosaic.py " \
            f"-te {xmin} {ymin} {xmax} {ymax} " \
            f"-e 32718 -res 10 -v " \
            f"-p {settings.S2M_NUM_JOBS} " \
            f"-o {mosaic_dir} {raw_dir}"
    run_subprocess(cmd)

    # Get mosaic band rasters
    mosaic_rgb_paths = [
        glob(os.path.join(mosaic_dir, f'*_{band}.tif'))
        for band in ['B04', 'B03', 'B02']
    ]
    mosaic_rgb_paths = [p[0] for p in mosaic_rgb_paths if p]
    logger.info("RGB paths: %s", mosaic_rgb_paths)

    # Use gdalbuildvrt to concatenate RGB bands from mosaic
    vrt_path = os.path.join(mosaic_dir, 'tci.vrt')
    cmd = f"gdalbuildvrt -separate {vrt_path} {' '.join(mosaic_rgb_paths)}"
    run_subprocess(cmd)

    # Clip to extent and rescale virtual raster
    clipped_tci_path = os.path.join(mosaic_dir, 'tci.tif')
    clip(src=vrt_path, dst=clipped_tci_path, aoi=EXTENT_UTM_PATH)

    # Rescale image
    rescale_byte(src=clipped_tci_path, dst=tci_path, in_range=(100, 3000))

    return tci_path
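A hedged usage sketch for the function above; the module-level names (PROC_DIR, RAW_DIR, EXTENT_PATH and the settings.* values) are assumed to be configured elsewhere in the project:

from datetime import date

# builds (or reuses) the composite for January 2021 and returns the TCI path
tci_path = download_and_build_composite(date(2021, 1, 1), date(2021, 1, 31))
print(tci_path)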
Ejemplo n.º 26
0
# connect to the API ('user' and 'password' assumed defined earlier)
api = SentinelAPI(user, password,
                  'https://scihub.copernicus.eu/dhus')

# download single scene by known product id
#api.download(<product_id>)

# search by polygon, time, and Hub query keywords
footprint = geojson_to_wkt(read_geojson(aoi_path))

products = api.query(footprint,
                     date=fechas,  # 'fechas' (dates tuple) defined earlier
                     platformname='Sentinel-2',
                     cloudcoverpercentage=(0, 100))
print(products)

# download all results from the search
result = api.download_all(products)
print(result)

# GeoJSON FeatureCollection containing footprints and metadata of the scenes
#api.to_geojson(products)

# GeoPandas GeoDataFrame with the metadata of the scenes and the footprints as geometries
#api.to_geodataframe(products)

# Get basic information about the product: its title, file size, MD5 sum, date, footprint and
# its download url
#api.get_product_odata(<product_id>)

# Get the product's full metadata available on the server
#api.get_product_odata(<product_id>, full=True)
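The commented-out calls above are genuine sentinelsat helpers; a short sketch of how they chain together, assuming the search above returned at least one product:

product_id = list(products)[0]

# basic info: title, file size, MD5 sum, date, footprint and download url
info = api.get_product_odata(product_id)
print(info['title'], info['size'])

# full metadata as available on the server
full_info = api.get_product_odata(product_id, full=True)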
Ejemplo n.º 27
0
class Command(BaseCommand):
    help = 'Download and process data'

    def add_arguments(self, parser):
        parser.description = """Download and process satellite data for a
        given time period.

        The user has to set up a username and password for the
        scihub.copernicus.eu service.

        Either --year and --week or --date has to be specified.
        """
        parser.add_argument('--user',
                            required=True,
                            type=str,
                            help="Copernicus SciHub user name")
        parser.add_argument('--password',
                            required=True,
                            type=str,
                            help="Copernicus SciHub user password")
        parser.add_argument('--date',
                            required=False,
                            type=str,
                            help="""Any day within required week in format
                            YYYYMMDD""")
        parser.add_argument('--week',
                            required=False,
                            type=str,
                            help="Week number in the required year")
        parser.add_argument('--year', required=False, type=str)
        parser.add_argument('--area',
                            required=True,
                            type=str,
                            help="Area name or id")
        parser.add_argument('--clouds',
                            required=True,
                            type=float,
                            help="Cloud coverage")

    def handle(self, *args, **options):
        """
        main method
        """

        user = options['user']
        passw = options['password']
        required_date = required_week = required_year = 0
        if options['date']:
            required_date = int(options['date'])
        if options['week']:
            required_week = int(options['week'])
        if options['year']:
            required_year = int(options['year'])
        area_name = options['area']
        clouds = options['clouds']

        self.api = SentinelAPI(user, passw,
                               'https://scihub.copernicus.eu/dhus')

        try:
            self.area = Area.objects.get(id=int(area_name))
        except (ValueError, Area.DoesNotExist):
            try:
                self.area = Area.objects.get(name=area_name)
            except Area.DoesNotExist:
                self.stdout.write(
                    self.style.ERROR(
                        'Given area <{}> does not exist'.format(area_name)))
                sys.exit(1)

        (starting_date, end_date,
         week_nr) = self._get_dates(required_year, required_week,
                                    required_date)

        products = self.get_products(starting_date,
                                     end_date,
                                     self.area,
                                     clouds=clouds)

        if not len(products.items()):
            # TODO save empty week maybe?
            self.stdout.write(
                self.style.WARNING(
                    'There is no data for given time period ' +
                    '<{start}, {end}>, '.format(start=starting_date,
                                                end=end_date) +
                    'maximal cloud cover <{cloud}%> and area <{area}>'.format(
                        area=area_name, cloud=clouds)))
            return

        self.tempdir = tempfile.mkdtemp()
        _TO_BE_CLEANED.append(self.tempdir)

        self.api.download_all(products, self.tempdir)
        products_data = self.get_bands(products)
        patched_bands = self._patch_rasters(products_data)

        analysed_data = self._analyse(patched_bands)

        if Week.objects.filter(date=starting_date,
                               area=self.area).count() == 0:
            week = Week(
                date=starting_date,
                area=self.area,
            )
        else:
            week = Week.objects.get(date=starting_date, area=self.area)

        week.cutline = self.cutline_geom.wkt
        for band in patched_bands:
            band_key = band.lower()
            getattr(week, band_key).save(
                os.path.basename(patched_bands[band]),
                File(open(patched_bands[band], "rb")),
                save=True)
        week.save()

        for an in analysed_data:
            at = AnalysisType.objects.get(name=an)
            if Analysis.objects.filter(week=week, type=at).count() == 0:
                analysis = Analysis(week=week, type=at)
            else:
                analysis = Analysis.objects.get(week=week, type=at)

            if analysed_data[an]["raster"]:
                analysis.raster.save(
                    os.path.basename(analysed_data[an]["raster"]),
                    File(open(analysed_data[an]["raster"], "rb")),
                    save=True)

            if analysed_data[an]["image"]:
                analysis.image.save(
                    os.path.basename(analysed_data[an]["image"]),
                    File(open(analysed_data[an]["image"], "rb")),
                    save=True)

            if analysed_data[an]["vector"]:
                analysis.vector.save(
                    os.path.basename(analysed_data[an]["vector"]),
                    File(open(analysed_data[an]["vector"], "rb")),
                    save=True)

            analysis.save()

        self.stdout.write(
            self.style.SUCCESS('Successfully created data for week {}'.format(
                week.week)))

    def save_satellite_image(self, product):
        kwargs = copy.deepcopy(product)
        kwargs.pop("highprobacloudspercentage")
        kwargs.pop("notvegetatedpercentage")
        kwargs.pop("snowicepercentage")
        kwargs.pop("unclassifiedpercentage")
        kwargs.pop("vegetationpercentage")
        kwargs.pop("waterpercentage")
        kwargs.pop("gmlfootprint")
        if kwargs["footprint"].find("POLYGON") == 0:
            geom = shapely.wkt.loads(kwargs["footprint"])
            geom = MultiPolygon([geom])
            kwargs["footprint"] = geom.wkt
        kwargs["frmt"] = kwargs.pop("format")
        kwargs["cloudcoverpercentage"] = kwargs.pop(
            "mediumprobacloudspercentage")
        if SatelliteImage.objects.filter(title=kwargs["title"]).count() == 0:
            img = SatelliteImage(**kwargs)
            img.save()
        else:
            img = SatelliteImage.objects.get(title=kwargs["title"])
        return img

    def _analyse(self, bands):
        """Perform the required analyses for the given area

        :param bands: dict with all available raster bands
        :return: dict with the resulting analyses; each analysis should have
            raster, image and vector keys
        """

        data = {}
        for analysis in ["ndvi", "ndwi"]:  # self.area.characteristics:
            mod_analysis = importlib.import_module(
                "varanus.methods.{}".format(analysis))
            data[analysis] = mod_analysis.analyse(bands, self.tempdir)

        return data

    def _patch_rasters(self, products_data):
        """Patch bands together from more products

        :param products_data: dict {product: {band: file_name}}

        :return: dict {band: file_name}
        """

        target_dir = os.path.join(self.tempdir, self.area.name, "merged")
        if not os.path.isdir(target_dir):
            os.makedirs(target_dir)

        products = products_data.keys()

        data = {}
        for band in products_data[list(products)[0]].keys():
            input_files = []

            for product in products_data:
                input_files.append(products_data[product][band])

            output = os.path.join(target_dir, "{}.tif".format(band))

            merge_command = [
                "-n", "0", "-a_nodata", "0", "-o", output, "-co",
                "COMPRESS=DEFLATE"
            ] + input_files
            gm.main(merge_command)

            data[band] = output
        return data

    def _get_final_cutline(self, target, cutline, crs):
        """
        Get final cutline based on input cutline and cloud mask

        :param target: name of target working directory
        :param cutline: geojson file name with the input cutline
        :param crs: the cutline's coordinate reference system ('EPSG:<code>')

        :return: file name with resulting cutline
        """

        granule = os.path.join(target, "GRANULE")
        qi_data = os.path.join(granule, os.listdir(granule)[0], "QI_DATA")
        clouds_file = os.path.join(qi_data, "MSK_CLOUDS_B00.gml")

        clouds_vectors = []
        clouds_ds = ogr.Open(clouds_file)
        layer = clouds_ds.GetLayer()
        if not layer:
            return cutline
        feature = layer.GetNextFeature()

        while feature is not None:
            geom = feature.GetGeometryRef()
            json_geom = geom.ExportToJson()
            # accumulate every cloud polygon, not only the last one
            clouds_vectors.append(shape(json.loads(json_geom)))
            feature = layer.GetNextFeature()

        cloud_vectors = unary_union(clouds_vectors)
        cloud_vectors = fiona.transform.transform_geom(crs, "EPSG:4326",
                                                       mapping(cloud_vectors))

        cutline_features = []
        with fiona.open(cutline) as cutline_src:
            for f in cutline_src:
                cutline_features.append(shape(f["geometry"]))
        cutline_features = unary_union(cutline_features)

        final_cutline = cutline_features.difference(shape(cloud_vectors))
        if final_cutline.type == "Polygon":
            final_cutline = MultiPolygon([final_cutline])
        self.cutline_geom = final_cutline

        target_file = "{}.geojson".format(os.path.join(target,
                                                       "cloud_cutline"))
        with open(target_file, "w") as out:
            data = {
                "type": "FeatureCollection",
                "features": [{
                    "type": "Feature",
                    "properties": {},
                    "geometry": mapping(final_cutline)
                }]
            }
            json.dump(data, out)
        return target_file

    def _get_all_band_files(self, target):
        """Create list of all available band files for given product target
        directory

        resulting data structure:

            ```
            {
                "res10m": {
                    "B01": "/path/to/file.jp2",
                    "B02": ...
                },
                "res20m": {
                    ...
                },
                ...
            }
            ```

        :return: dict of band file paths, structured as shown above
        """

        data = {}
        granule = os.path.join(target, "GRANULE")
        granule_name = os.listdir(granule)[0]

        resolutions = os.listdir(
            os.path.join(granule, granule_name, "IMG_DATA"))

        for res in resolutions:
            data[res] = {}
            images = os.listdir(
                os.path.join(granule, granule_name, "IMG_DATA", res))
            for image in images:
                band_name = image.split("_")[2]
                data[res][band_name] = os.path.join(granule, granule_name,
                                                    "IMG_DATA", res, image)

        return data

    def _cut_bands(self, bands, cutline, target):
        """Cut area of interest based on given cutline

        :param bands: data structure of all bands as returned by
            _get_all_band_files
        :param cutline: filename of required cutline
        :param target: directory where the resulting data should be written

        :return: same structure as input `bands`, but with cut raster files
        """

        area_dir = os.path.join(target, self.area.name)
        if not os.path.isdir(area_dir):
            os.mkdir(area_dir)

        data = {}
        for res in bands:
            for band in bands[res]:

                new_file = os.path.join(area_dir, "{}.jp2".format(band))
                gdal.Warp(new_file,
                          os.path.join(bands[res][band]),
                          dstSRS="+init=epsg:4326",
                          cropToCutline=True,
                          resampleAlg="near",
                          format="GTiff",
                          cutlineDSName=cutline,
                          dstNodata=0,
                          creationOptions=["COMPRESS=DEFLATE"])

                data[band] = new_file

        return data

    def _get_dates(self, year=None, week=None, date=None):
        """
        :param year: required year
        :param week: required week
        :param date: date as integer

        :return: (start_date, end_date, week_number)
        """

        start_date = None
        end_date = None

        if date:
            date = int(date)
            if date < 10000000:
                self.stdout.write(
                    self.style.ERROR(
                        'Date <{}> is not in required format YYYYMMDD'.format(
                            date)))
                sys.exit(1)
            year = date // 10000
            month = (date - (year * 10000)) // 100
            day = (date - year * 10000 - month * 100)
            date = datetime.datetime(year, month, day)
            year, week, weekday = date.isocalendar()
            firstday = datetime.timedelta(days=weekday - 1)
            lastday = datetime.timedelta(days=PERIOD - weekday)
            start_date = date - firstday
            end_date = date + lastday
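            # worked example, assuming PERIOD = 7: date=20200617 is a
            #   Wednesday (ISO week 25), so weekday=3, start_date=2020-06-15
            #   (Monday) and end_date=2020-06-21 (Sunday)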
        else:
            first_day_in_year = datetime.date(year, 1, 1)
            first_day_in_week = datetime.timedelta(days=(week - 1) * 7)
            last_day_in_week = datetime.timedelta(days=PERIOD)
            start_date = first_day_in_year + first_day_in_week
            end_date = start_date + last_day_in_week

        return (start_date, end_date, week)

    def get_bands(self, products):
        """
        Get dict with raster bands from downloaded Sentinel products

        :param products: list of products
        :return: dict of raster files for each band
        """

        data = {}
        for pid in products:
            product = products[pid]

            self.save_satellite_image(product)

            title = product["title"]
            filename = product["filename"]
            zipfname = os.path.join(self.tempdir, "{}.zip".format(title))

            with ZipFile(zipfname, 'r') as zipObj:
                zipObj.extractall(path=self.tempdir)

            cutline = self.area.to_geojson_file(self.tempdir)

            product_dir = os.path.join(self.tempdir, filename)

            all_bands = self._get_all_band_files(product_dir)
            crs = self._get_crs_from_band(all_bands["R10m"]["B02"])

            clouds_cutline = self._get_final_cutline(product_dir, cutline, crs)

            bands = self._cut_bands(all_bands, clouds_cutline, product_dir)
            data[product_dir] = bands

        return data

    def _get_crs_from_band(self, raster):
        """Get an "EPSG:<code>" text string from an input raster file

        :param raster: full file name
        :return: crs "EPSG:<code>" text string
        """

        with rio.open(raster) as r:
            return "EPSG:{code}".format(code=r.crs.to_epsg())

    def get_products(self, start_date, end_date, area, clouds=100):
        """
        :param start_date: starting date object
        :param end_date: end date object
        :param area: required area object
        """

        products = self.api.query(area.area.wkt,
                                  date=(start_date, end_date),
                                  platformname='Sentinel-2',
                                  cloudcoverpercentage=(0, clouds),
                                  producttype="S2MSI2A")

        return products
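A hedged way to drive this management command from Python; 'process_sentinel' below is a placeholder, since the real command name is whatever the module file is called under management/commands/:

from django.core.management import call_command

# equivalent to:
#   python manage.py process_sentinel --user U --password P \
#       --year 2020 --week 23 --area my-area --clouds 30
call_command('process_sentinel',   # hypothetical command name
             user='U', password='P',
             year='2020', week='23',
             area='my-area', clouds=30.0)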
Ejemplo n.º 28
0
def cli(user, password, geometry, start, end, uuid, name, download, sentinel, producttype,
        instrument, cloud, footprints, path, query, url, order_by, limit):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your Copernicus Open Access Hub user and password, you must pass a geojson file
    containing the geometry of the area you want to search for or the UUIDs of the products. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    _set_logger_handler()

    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel

    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument

    if producttype:
        search_kwargs["producttype"] = producttype

    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs["cloudcoverpercentage"] = (0, cloud)

    if query is not None:
        search_kwargs.update((x.split('=') for x in query.split(',')))

    if geometry is not None:
        search_kwargs['area'] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        uuid_list = [x.strip() for x in uuid.split(',')]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if 'Invalid key' in e.msg:
                    logger.error('No product with ID \'%s\' exists on server', productid)
    elif name is not None:
        search_kwargs["identifier"] = name
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by, limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path)
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write("%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        for product_id, props in products.items():
            if uuid is None:
                logger.info('Product %s - %s', product_id, props['summary'])
            else:  # querying uuids has no summary key
                logger.info('Product %s - %s - %s MB', product_id, props['title'],
                            round(int(props['size']) / (1024. * 1024.), 2))
        if uuid is None:
            logger.info('---')
            logger.info('%s scenes found with a total size of %.2f GB',
                        len(products), api.get_products_size(products))
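The --query option in the example above is parsed with a plain split on commas and equals signs; a quick illustration of what that produces (and of its limits):

raw = "producttype=GRD,orbitdirection=ASCENDING"
extra = dict(x.split('=') for x in raw.split(','))
print(extra)  # {'producttype': 'GRD', 'orbitdirection': 'ASCENDING'}
# note: values that themselves contain '=' or ',' would break this scheme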
Ejemplo n.º 29
0
def cli(user, password, geometry, start, end, uuid, name, download, md5, sentinel, producttype,
        instrument, cloud, footprints, path, query, url, order_by, limit):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your Copernicus Open Access Hub user and password, you must pass a geojson file
    containing the geometry of the area you want to search for or the UUIDs of the products. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    _set_logger_handler()

    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel

    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument

    if producttype:
        search_kwargs["producttype"] = producttype

    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs["cloudcoverpercentage"] = (0, cloud)

    if query is not None:
        search_kwargs.update((x.split('=') for x in query.split(',')))

    if geometry is not None:
        search_kwargs['area'] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        uuid_list = [x.strip() for x in uuid.split(',')]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if 'Invalid key' in e.msg:
                    logger.error('No product with ID \'%s\' exists on server', productid)
    elif name is not None:
        search_kwargs["identifier"] = name
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by, limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path, checksum=md5)
        if md5 is True:
            if len(failed_downloads) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for failed_id in failed_downloads:
                        outfile.write("%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        for product_id, props in products.items():
            if uuid is None:
                logger.info('Product %s - %s', product_id, props['summary'])
            else:  # querying uuids has no summary key
                logger.info('Product %s - %s - %s MB', product_id, props['title'],
                            round(int(props['size']) / (1024. * 1024.), 2))
        if uuid is None:
            logger.info('---')
            logger.info('%s scenes found with a total size of %.2f GB',
                        len(products), api.get_products_size(products))
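The only functional difference from the previous example is the --md5 flag, passed through to download_all as checksum; products whose checksum verification fails end up in failed_downloads. A minimal sketch of the same pattern outside the CLI, assuming api, products and path as above:

product_infos, failed_downloads = api.download_all(products, path,
                                                   checksum=True)
for failed_id in failed_downloads:
    print('checksum failed for', failed_id, products[failed_id]['title'])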
Ejemplo n.º 30
0
#!/usr/bin/env python3
from sentinelsat.sentinel import SentinelAPI, read_geojson, geojson_to_wkt
from datetime import date
import os

from_date = date(2018, 11, 1)
to_date   = date(2019,  1, 1)

root_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')
aoi_path = os.path.join(root_path, 'data', 'aoi_4326.geojson')
download_path = os.path.join(root_path, 'data', 'images', 's1', '_real')

api = SentinelAPI(os.getenv("SCIHUB_USER"), os.getenv("SCIHUB_PASS"), 'https://scihub.copernicus.eu/dhus')

footprint = geojson_to_wkt(read_geojson(aoi_path))
products = api.query(footprint,
                     date=(from_date, to_date),
                     platformname='Sentinel-1',
                     producttype='GRD',
                     polarisationmode='VV VH',
                     orbitdirection='ASCENDING')

for k, p in products.items():
    print((k, p['summary']))

os.makedirs(download_path, exist_ok=True)

results = api.download_all(products, directory_path=download_path)
print(results)
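As a follow-up, the search results can also be inspected as a table before or after downloading; a sketch assuming geopandas is installed (the exact column names follow the hub's OpenSearch metadata and may vary):

# GeoDataFrame with scene metadata and footprint geometries
gdf = api.to_geodataframe(products)
print(gdf[['title', 'beginposition', 'orbitdirection']].head())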