def test_SentinelAPI_wrong_credentials():
    """Bad credentials must surface as SentinelAPIError with HTTP status 401."""
    api = SentinelAPI("wrong_user", "wrong_password")

    with pytest.raises(SentinelAPIError) as excinfo:
        api.query('0 0,1 1,0 1,0 0', datetime(2015, 1, 1), datetime(2015, 1, 2))
    assert excinfo.value.http_status == 401

    # Follow-up calls on the failed session must also raise.
    with pytest.raises(SentinelAPIError):
        api.get_products_size()
        api.get_products()
def test_SentinelAPI_wrong_credentials():
    """With bad credentials the query response carries HTTP 401 and the
    product accessors raise ValueError."""
    api = SentinelAPI("wrong_user", "wrong_password")
    api.query('0 0,1 1,0 1,0 0', datetime(2015, 1, 1), datetime(2015, 1, 2))
    assert api.content.status_code == 401

    with pytest.raises(ValueError):
        api.get_products_size()
        api.get_products()
def test_get_products_size():
    """Total size of query results is reported in GB, rounded to 2 decimals."""
    api = SentinelAPI(environ['SENTINEL_USER'], environ['SENTINEL_PASSWORD'])
    api.query(get_coordinates('tests/map.geojson'),
              "20151219", "20151228", platformname="Sentinel-2")
    assert api.get_products_size() == 63.58

    # A single tiny OCN product: size rounds down to zero.
    api.query_raw(
        "S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E")
    assert len(api.get_products()) > 0
    # Rounded to zero
    assert api.get_products_size() == 0
def test_get_products_size():
    """get_products_size() sums the size of an explicit products mapping."""
    api = SentinelAPI(**_api_auth)
    products = api.query(get_coordinates('tests/map.geojson'),
                         "20151219", "20151228", platformname="Sentinel-2")
    assert api.get_products_size(products) == 63.58

    # reset products
    # load new very small query
    products = api.load_query(
        "S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E")
    assert len(products) > 0
    # Rounded to zero
    assert api.get_products_size(products) == 0
def test_get_products_size():
    """get_products_size() reflects the API object's current product list."""
    api = SentinelAPI(**_api_auth)
    api.query(get_coordinates('tests/map.geojson'),
              "20151219", "20151228", platformname="Sentinel-2")
    assert api.get_products_size() == 63.58

    # reset products
    api.products = []
    # load new very small query
    api.load_query(
        "S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E")
    assert len(api.get_products()) > 0
    # Rounded to zero
    assert api.get_products_size() == 0
def test_get_products_size():
    """Size of a fixed Sentinel-2 query over tests/map.geojson is 63.58 GB."""
    api = SentinelAPI(environ.get('SENTINEL_USER'),
                      environ.get('SENTINEL_PASSWORD'))
    api.query(get_coordinates('tests/map.geojson'),
              "20151219", "20151228", platformname="Sentinel-2")
    assert api.get_products_size() == 63.58
def search(user, password, tile, geojson, start, end, download, md5,
           sentinel1, sentinel2, cloud, footprints, path, query, url):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """
    api = SentinelAPI(user, password, url)

    # Build the platform/cloud filter; --cloud implies Sentinel-2.
    search_kwargs = {}
    if cloud:
        search_kwargs.update({"platformname": "Sentinel-2",
                              "cloudcoverpercentage": "[0 TO %s]" % cloud})
    elif sentinel2:
        search_kwargs.update({"platformname": "Sentinel-2"})
    elif sentinel1:
        search_kwargs.update({"platformname": "Sentinel-1"})
    if query is not None:
        # Raw "key=value,key=value" pairs are merged into the search filter.
        search_kwargs.update(dict([i.split('=') for i in query.split(',')]))

    if tile:
        api.query(point=get_coordinates(tile=tile),
                  initial_date=start, end_date=end, **search_kwargs)
    elif geojson:
        api.query(area=get_coordinates(geojson_file=geojson),
                  initial_date=start, end_date=end, **search_kwargs)
    else:
        raise ValueError("Either a --geojson or --tile arguments must be given.")

    if footprints is True:
        footprints_geojson = api.get_footprints()
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        result = api.download_all(path, checksum=md5)
        if md5 is True:
            # download_all maps file path -> info; a non-None entry marks a
            # checksum failure in this API version.
            corrupt_scenes = [(path, info["id"])
                              for path, info in result.items()
                              if info is not None]
            if len(corrupt_scenes) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for corrupt_tuple in corrupt_scenes:
                        outfile.write("%s : %s\n" % corrupt_tuple)
    else:
        # Dry run: list the results instead of downloading.
        for product in api.get_products():
            print('Product %s - %s' % (product['id'], product['summary']))
        print('---')
        print('%s scenes found with a total size of %.2f GB' %
              (len(api.get_products()), api.get_products_size()))
def search(user, password, geojson, start, end, download, md5,
           sentinel1, sentinel2, cloud, footprints, path, query, url):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """
    api = SentinelAPI(user, password, url)

    # Build the platform/cloud filter; --cloud implies Sentinel-2.
    search_kwargs = {}
    if cloud:
        search_kwargs.update({"platformname": "Sentinel-2",
                              "cloudcoverpercentage": "[0 TO %s]" % cloud})
    elif sentinel2:
        search_kwargs.update({"platformname": "Sentinel-2"})
    elif sentinel1:
        search_kwargs.update({"platformname": "Sentinel-1"})
    if query is not None:
        # Raw "key=value,key=value" pairs are merged into the search filter.
        search_kwargs.update(dict([i.split('=') for i in query.split(',')]))

    api.query(get_coordinates(geojson), start, end, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.get_footprints()
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        result = api.download_all(path, checksum=md5)
        if md5 is True:
            # A non-None info entry marks a failed checksum in this API version.
            corrupt_scenes = [(path, info["id"])
                              for path, info in result.items()
                              if info is not None]
            if len(corrupt_scenes) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for corrupt_tuple in corrupt_scenes:
                        outfile.write("%s : %s\n" % corrupt_tuple)
    else:
        # Dry run: list the results instead of downloading.
        for product in api.get_products():
            print('Product %s - %s' % (product['id'], product['summary']))
        print('---')
        print('%s scenes found with a total size of %.2f GB' %
              (len(api.get_products()), api.get_products_size()))
# NOTE(review): this chunk began mid-call in the mangled source; the
# SentinelAPI(...) opening is reconstructed from the parallel script below.
api = SentinelAPI('user', 'password',
                  api_url='https://scihub.copernicus.eu/apihub/')

### Sentinel data search
# products = api.query(area=get_coordinates('/media/tweiss/Work/coordinates_wallerfing.geojson'), initial_date='20151219', end_date=datetime(2015, 12, 24), platformname='Sentinel-1', producttype='GRD')
# products = api.query(get_area(48.68, 48.70, 12.89 ,12.999), '20151219', datetime(2015, 12, 29), platformname='Sentinel-1')
products = api.query(
    area=get_coordinates(
        '/media/tweiss/Work/python_code/MULTIPLY/sentinel_download/coordinates_test_sites/coordinates_test_sites'
    ),
    initial_date='20170628', end_date='20170730',
    platformname='Sentinel-1', producttype='SLC')
print(api._last_query)
# BUG FIX: '%' must be applied to the format string *inside* print(); the
# original applied it to print()'s return value (None) -> TypeError at runtime.
print('%s product results for your query. The products need %s Gb disk space'
      % (len(products), api.get_products_size(products)))

### convert to Pandas DataFrame
products_df = api.to_dataframe(products)
print(products_df.index.values)

### download all query products
path = '/media/nas_data/Thomas/Wallerfing/Sentinel_1_data'
result = api.download_all(products, directory_path=path, max_attempts=10,
                          checksum=True, check_existing=True,
                          show_progress=False)
print('Downloaded files:')
# BUG FIX: dict.viewkeys() is Python 2 only; keys() is the Python 3 equivalent.
print(result.keys())
logging.basicConfig(format='%(message)s', level='DEBUG')

# connect to the API
api = SentinelAPI('s3guest', 's3guest', 'https://scihub.copernicus.eu/s3')

# download single scene by known product id
# api.download(<product_id>)

# search by polygon, time, and SciHub query keywords
# products = api.query(,'20151219', date(2015, 12, 29), platformname = 'Sentinel-2', cloudcoverpercentage = '[0 TO 30]'})
products = api.query(get_coordinates('map.geojson'),
                     initial_date=datetime(2017, 2, 24),
                     end_date=datetime(2017, 2, 25))

# api.get_products_size(products)
print(api.get_products_size(products))

# download all results from the search
# api.download_all(products)

# GeoJSON FeatureCollection containing footprints and metadata of the scenes
fp = api.get_footprints(products)
with open('footprints.json', 'w') as outfile:
    # json.dump({'numbers':n, 'strings':s, 'x':x, 'y':y}, outfile, indent=4)
    json.dump(fp, outfile, indent=4)
with open('product.json', 'w') as outfile:
    # json.dump({'numbers':n, 'strings':s, 'x':x, 'y':y}, outfile, indent=4)
    json.dump(products, outfile, indent=4)
def search(user, password, geojson, start, end, download, md5, sentinel, producttype, instrument, sentinel1, sentinel2, cloud, footprints, path, query, url): """Search for Sentinel products and, optionally, download all the results and/or create a geojson file with the search result footprints. Beyond your SciHub user and password, you must pass a geojson file containing the polygon of the area you want to search for. If you don't specify the start and end dates, it will search in the last 24 hours. """ api = SentinelAPI(user, password, url) search_kwargs = {} if sentinel and not (producttype or instrument): search_kwargs.update({"platformname": "Sentinel-" + sentinel}) if instrument and not producttype: search_kwargs.update({"instrumentshortname": instrument}) if producttype: search_kwargs.update({"producttype": producttype}) if cloud: if sentinel not in ['2', '3']: logger.error('Cloud cover is only supported for Sentinel 2 and 3.') raise ValueError( 'Cloud cover is only supported for Sentinel 2 and 3.') search_kwargs.update({"cloudcoverpercentage": "[0 TO %s]" % cloud}) # DEPRECATED: to be removed with next major release elif sentinel2: search_kwargs.update({"platformname": "Sentinel-2"}) logger.info('DEPRECATED: Please use --sentinel instead') # DEPRECATED: to be removed with next major release elif sentinel1: search_kwargs.update({"platformname": "Sentinel-1"}) logger.info('DEPRECATED: Please use --sentinel instead') if query is not None: search_kwargs.update(dict([i.split('=') for i in query.split(',')])) wkt = geojson_to_wkt(read_geojson(geojson)) products = api.query(wkt, start, end, **search_kwargs) if footprints is True: footprints_geojson = api.to_geojson(products) with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile: outfile.write(gj.dumps(footprints_geojson)) if download is True: product_infos, failed_downloads = api.download_all(products, path, checksum=md5) if md5 is True: if len(failed_downloads) > 0: with 
open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile: for failed_id in failed_downloads: outfile.write( "%s : %s\n" % (failed_id, products[failed_id]['title'])) else: for product_id, props in products.items(): logger.info('Product %s - %s' % (product_id, props['summary'])) logger.info('---') logger.info('%s scenes found with a total size of %.2f GB' % (len(products), api.get_products_size(products)))
def cli(user, password, geometry, start, end, uuid, name, download, quicklook,
        sentinel, producttype, instrument, cloud, footprints, path, query, url,
        order_by, location, limit, info):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your Copernicus Open Access Hub user and password, you must pass a
    geojson file containing the geometry of the area you want to search for
    or the UUIDs of the products. If you don't specify the start and end dates,
    it will search in the last 24 hours.
    """
    _set_logger_handler()

    # Fall back to ~/.netrc credentials when none were given on the CLI.
    if user is None or password is None:
        try:
            user, password = requests.utils.get_netrc_auth(url)
        except TypeError:
            pass
    if user is None or password is None:
        raise click.UsageError(
            "Missing --user and --password. Please see docs "
            "for environment variables and .netrc support.")

    api = SentinelAPI(user, password, url)

    if info:
        ctx = click.get_current_context()
        click.echo("DHuS version: " + api.dhus_version)
        ctx.exit()

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel
    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument
    if producttype:
        search_kwargs["producttype"] = producttype
    if cloud:
        if sentinel not in ["2", "3"]:
            logger.error("Cloud cover is only supported for Sentinel 2 and 3.")
            exit(1)
        search_kwargs["cloudcoverpercentage"] = (0, cloud)
    if query is not None:
        search_kwargs.update((x.split("=") for x in query))

    if location is not None:
        # NOTE(review): this rebinds `info`, shadowing the --info flag; the
        # flag has already been consumed above so behavior is unaffected.
        wkt, info = placename_to_wkt(location)
        minX, minY, maxX, maxY = info["bbox"]
        r = 6371  # average radius, km
        extent_east = r * math.radians(maxX - minX) * math.cos(
            math.radians((minY + maxY) / 2))
        extent_north = r * math.radians(maxY - minY)
        logger.info(
            "Querying location: '%s' with %.1f x %.1f km, %f, %f to %f, %f bounding box",
            info["display_name"], extent_north, extent_east,
            minY, minX, maxY, maxX)
        search_kwargs["area"] = wkt

    if geometry is not None:
        # check if the value is an existing path
        if os.path.exists(geometry):
            search_kwargs["area"] = geojson_to_wkt(read_geojson(geometry))
        # check if the value is a GeoJSON
        else:
            if geometry.startswith("{"):
                try:
                    geometry = json.loads(geometry)
                    search_kwargs["area"] = geojson_to_wkt(geometry)
                except json_parse_exception:
                    raise click.UsageError(
                        "geometry string starts with '{' but is not a valid GeoJSON."
                    )
            # check if the value is a WKT
            elif is_wkt(geometry):
                search_kwargs["area"] = geometry
            else:
                raise click.UsageError(
                    "The geometry input is neither a GeoJSON file with a valid path, "
                    "a GeoJSON String nor a WKT string.")

    if uuid is not None:
        uuid_list = [x.strip() for x in uuid]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except InvalidKeyError:
                logger.error("No product with ID '%s' exists on server",
                             productid)
                exit(1)
    elif name is not None:
        # Several --name values are OR-ed into one identifier expression.
        search_kwargs["identifier"] = (
            name[0] if len(name) == 1 else "(" + " OR ".join(name) + ")")
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end), order_by=order_by,
                             limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if quicklook:
        downloaded_quicklooks, failed_quicklooks = api.download_all_quicklooks(
            products, path)
        if failed_quicklooks:
            api.logger.warning("Some quicklooks failed: %s out of %s",
                               len(failed_quicklooks), len(products))

    if download is True:
        product_infos, triggered, failed_downloads = api.download_all(
            products, path)
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write(
                        "%s : %s\n" % (failed_id, products[failed_id]["title"]))
    else:
        # Dry run: list the results instead of downloading.
        for product_id, props in products.items():
            if uuid is None:
                logger.info("Product %s - %s", product_id, props["summary"])
            else:
                # querying uuids has no summary key
                logger.info(
                    "Product %s - %s - %s MB",
                    product_id,
                    props["title"],
                    round(int(props["size"]) / (1024.0 * 1024.0), 2),
                )
    if uuid is None:
        logger.info("---")
        logger.info(
            "%s scenes found with a total size of %.2f GB",
            len(products),
            api.get_products_size(products),
        )
def main(producttype, aoi, date, qa, unit, resolution, chunk_size,
         num_threads, num_workers):
    """Download Sentinel-5P L2 products, regrid them to L3 with HARP, and
    export the merged time series as a single netCDF file."""
    api = SentinelAPI(DHUS_USER, DHUS_PASSWORD, DHUS_URL)
    tqdm.write("\nRequesting products\n")

    query_body = {
        "date": date,
        "platformname": "Sentinel-5 Precursor",
        "producttype": producttype,
    }

    # query database
    if aoi is None:
        products = api.query(**query_body)
    else:
        footprint = geojson_to_wkt(read_geojson(Path(aoi)))
        products = api.query(footprint, **query_body)

    # display results
    tqdm.write(("Number of products found: {number_product}\n"
                "Total products size: {size:.2f} GB\n").format(
                    number_product=len(products),
                    size=api.get_products_size(products)))

    # list of uuids for each product in the query
    ids_request = list(products.keys())
    if len(ids_request) == 0:
        tqdm.write("Done!")
        sys.exit(0)

    # list of downloaded filenames urls
    filenames = [DOWNLOAD_DIR / f"{products[file_id]['title']}.nc"
                 for file_id in ids_request]

    makedirs(DOWNLOAD_DIR, exist_ok=True)
    with ThreadPool(num_threads) as pool:
        pool.map(
            partial(fetch_product, api=api, products=products,
                    download_dir=DOWNLOAD_DIR),
            ids_request)
        pool.close()
        pool.join()

    tqdm.write("Converting into L3 products\n")

    # Step size for spatial re-gridding (in degrees)
    xstep, ystep = resolution
    if aoi is None:
        minx, miny, maxx, maxy = -180, -90, 180, 90
    else:
        minx, miny, maxx, maxy = geopandas.read_file(
            Path(aoi)).bounds.values.squeeze()

    # computes offsets and number of samples
    lat_length, lat_offset, lon_length, lon_offset = compute_lengths_and_offsets(
        minx, miny, maxx, maxy, ystep, xstep)
    harp_commands = generate_harp_commands(
        producttype, qa, unit, xstep, ystep,
        lat_length, lat_offset, lon_length, lon_offset)

    makedirs(EXPORT_DIR, exist_ok=True)
    tqdm.write(f"Launched {num_workers} processes")
    with Pool(processes=num_workers) as pool:
        list(
            tqdm(
                pool.imap_unordered(
                    partial(process_file, harp_commands=harp_commands,
                            export_dir=EXPORT_DIR),
                    filenames,
                ),
                desc="Converting",
                leave=False,
                total=len(filenames),
            ))
        pool.close()
        pool.join()

    # Recover attributes
    attributes = {
        filename.name: {
            "time_coverage_start":
                xr.open_dataset(filename).attrs["time_coverage_start"],
            "time_coverage_end":
                xr.open_dataset(filename).attrs["time_coverage_end"],
        }
        for filename in filenames
    }

    tqdm.write("Processing data\n")
    xr.set_options(keep_attrs=True)
    DS = xr.open_mfdataset(
        [str(filename.relative_to(".")).replace("L2", "L3")
         for filename in filenames
         if exists(str(filename.relative_to(".")).replace("L2", "L3"))],
        combine="nested",
        concat_dim="time",
        parallel=True,
        preprocess=partial(preprocess_time, attributes=attributes),
        decode_times=False,
        chunks={"time": chunk_size},
    )
    DS = DS.sortby("time")
    DS.rio.write_crs("epsg:4326", inplace=True)
    DS.rio.set_spatial_dims(x_dim="longitude", y_dim="latitude", inplace=True)

    tqdm.write("Exporting netCDF file\n")
    start = min(products[uuid]["beginposition"] for uuid in products.keys())
    end = max(products[uuid]["endposition"] for uuid in products.keys())
    export_dir = PROCESSED_DIR / f"processed{producttype[2:]}"
    makedirs(export_dir, exist_ok=True)
    file_export_name = export_dir / (
        f"{producttype[4:]}{start.day}-{start.month}-{start.year}__"
        f"{end.day}-{end.month}-{end.year}.nc")
    DS.to_netcdf(file_export_name)
    tqdm.write("Done!")
def cli(user, password, geometry, start, end, uuid, name, download, sentinel,
        producttype, instrument, cloud, footprints, path, query, url,
        order_by, limit):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your Copernicus Open Access Hub user and password, you must pass a
    geojson file containing the geometry of the area you want to search for
    or the UUIDs of the products. If you don't specify the start and end dates,
    it will search in the last 24 hours.
    """
    _set_logger_handler()
    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel
    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument
    if producttype:
        search_kwargs["producttype"] = producttype
    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs["cloudcoverpercentage"] = (0, cloud)
    if query is not None:
        search_kwargs.update((x.split('=') for x in query.split(',')))
    if geometry is not None:
        search_kwargs['area'] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        uuid_list = [x.strip() for x in uuid.split(',')]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if 'Invalid key' in e.msg:
                    logger.error('No product with ID \'%s\' exists on server',
                                 productid)
    elif name is not None:
        search_kwargs["identifier"] = name
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end), order_by=order_by,
                             limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path)
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write(
                        "%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        # Dry run: list the results instead of downloading.
        for product_id, props in products.items():
            if uuid is None:
                logger.info('Product %s - %s', product_id, props['summary'])
            else:
                # querying uuids has no summary key
                logger.info('Product %s - %s - %s MB', product_id,
                            props['title'],
                            round(int(props['size']) / (1024. * 1024.), 2))
    if uuid is None:
        logger.info('---')
        logger.info('%s scenes found with a total size of %.2f GB',
                    len(products), api.get_products_size(products))
def cli(user, password, geometry, start, end, uuid, name, download, sentinel,
        producttype, instrument, cloud, footprints, path, query, url,
        order_by, limit):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your Copernicus Open Access Hub user and password, you must pass a
    geojson file containing the geometry of the area you want to search for
    or the UUIDs of the products. If you don't specify the start and end dates,
    it will search in the last 24 hours.
    """
    _set_logger_handler()

    # Fall back to ~/.netrc credentials when none were given on the CLI.
    if user is None or password is None:
        try:
            user, password = requests.utils.get_netrc_auth(url)
        except TypeError:
            pass
    if user is None or password is None:
        raise click.UsageError(
            "Missing --user and --password. Please see docs "
            "for environment variables and .netrc support.")

    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel
    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument
    if producttype:
        search_kwargs["producttype"] = producttype
    if cloud:
        if sentinel not in ["2", "3"]:
            logger.error("Cloud cover is only supported for Sentinel 2 and 3.")
            exit(1)
        search_kwargs["cloudcoverpercentage"] = (0, cloud)
    if query is not None:
        search_kwargs.update((x.split("=") for x in query))
    if geometry is not None:
        search_kwargs["area"] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        uuid_list = [x.strip() for x in uuid]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if "Invalid key" in e.msg:
                    logger.error("No product with ID '%s' exists on server",
                                 productid)
                    exit(1)
                else:
                    raise
    elif name is not None:
        # Several --name values are OR-ed into one identifier expression.
        search_kwargs["identifier"] = (
            name[0] if len(name) == 1 else "(" + " OR ".join(name) + ")")
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end), order_by=order_by,
                             limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        # NOTE(review): `footprints` is True here, so os.path.isdir(footprints)
        # can never name a directory — presumably this expected a path value;
        # behavior preserved as-is, confirm against the click option definition.
        if os.path.isdir(footprints):
            foot_path = os.path.join(footprints, "search_footprints.geojson")
        else:
            foot_path = "search_footprints.geojson"
        if path == ".":
            dump_path = os.path.join(os.getcwd(), foot_path)
        else:
            dump_path = os.path.join(path, foot_path)
        with open(dump_path, "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, triggered, failed_downloads = api.download_all(
            products, path)
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write(
                        "%s : %s\n" % (failed_id, products[failed_id]["title"]))
    else:
        # Dry run: list the results instead of downloading.
        for product_id, props in products.items():
            if uuid is None:
                logger.info("Product %s - %s", product_id, props["summary"])
            else:
                # querying uuids has no summary key
                logger.info(
                    "Product %s - %s - %s MB",
                    product_id,
                    props["title"],
                    round(int(props["size"]) / (1024.0 * 1024.0), 2),
                )
    if uuid is None:
        logger.info("---")
        logger.info(
            "%s scenes found with a total size of %.2f GB",
            len(products),
            api.get_products_size(products),
        )
def search(user, password, geojson, start, end, download, md5, sentinel,
           producttype, instrument, sentinel1, sentinel2, cloud, footprints,
           path, query, url):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """
    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs.update({"platformname": "Sentinel-" + sentinel})
    if instrument and not producttype:
        search_kwargs.update({"instrumentshortname": instrument})
    if producttype:
        search_kwargs.update({"producttype": producttype})

    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs.update({"cloudcoverpercentage": "[0 TO %s]" % cloud})
    # DEPRECATED: to be removed with next major release
    elif sentinel2:
        search_kwargs.update({"platformname": "Sentinel-2"})
        logger.info('DEPRECATED: Please use --sentinel instead')
    # DEPRECATED: to be removed with next major release
    elif sentinel1:
        search_kwargs.update({"platformname": "Sentinel-1"})
        logger.info('DEPRECATED: Please use --sentinel instead')

    if query is not None:
        # Raw "key=value,key=value" pairs are merged into the search filter.
        search_kwargs.update(dict([i.split('=') for i in query.split(',')]))

    wkt = geojson_to_wkt(read_geojson(geojson))
    products = api.query(wkt, start, end, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path,
                                                           checksum=md5)
        if md5 is True:
            if len(failed_downloads) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for failed_id in failed_downloads:
                        outfile.write(
                            "%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        # Dry run: list the results instead of downloading.
        for product_id, props in products.items():
            logger.info('Product %s - %s' % (product_id, props['summary']))

    logger.info('---')
    logger.info('%s scenes found with a total size of %.2f GB' %
                (len(products), api.get_products_size(products)))
swathidentifer cloudcoverpercentage (e.g. '[0 TO 40]') for possible values and probably more parameters please visit https://scihub.copernicus.eu/twiki/do/view/SciHubUserGuide/3FullTextSearch#Search_Keywords """ print(datetime.now()) ### login information Copernicus Open Access Hub (https://scihub.copernicus.eu/dhus/#/home) api = SentinelAPI('user', 'password', api_url='https://scihub.copernicus.eu/apihub/') ### Sentinel data search # products = api.query(area=get_coordinates('/media/tweiss/Work/coordinates_wallerfing.geojson'), initial_date='20151219', end_date=datetime(2015, 12, 24), platformname='Sentinel-1', producttype='GRD') products = api.query(get_area(48.68, 48.70, 12.89 ,12.999), '20151219', datetime(2015, 12, 29), platformname='Sentinel-1') print(api._last_query) print('%s product results for your query. The products need %s Gb disk space') % (len(products), api.get_products_size(products)) ### convert to Pandas DataFrame products_df = api.to_dataframe(products) print(products_df.index.values) ### download all query products path = '/media/tweiss/Daten' result = api.download_all(products, directory_path=path, max_attempts=10, checksum=True, check_existing=True, show_progress=False) print('Downloaded files:') print(result.viewkeys()) """ Change and/or sort query results """
def cli(user, password, geometry, start, end, uuid, name, download, md5,
        sentinel, producttype, instrument, cloud, footprints, path, query,
        url, order_by, limit):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your Copernicus Open Access Hub user and password, you must pass a
    geojson file containing the geometry of the area you want to search for
    or the UUIDs of the products. If you don't specify the start and end dates,
    it will search in the last 24 hours.
    """
    _set_logger_handler()
    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel
    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument
    if producttype:
        search_kwargs["producttype"] = producttype
    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs["cloudcoverpercentage"] = (0, cloud)
    if query is not None:
        search_kwargs.update((x.split('=') for x in query.split(',')))
    if geometry is not None:
        search_kwargs['area'] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        uuid_list = [x.strip() for x in uuid.split(',')]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if 'Invalid key' in e.msg:
                    logger.error('No product with ID \'%s\' exists on server',
                                 productid)
    elif name is not None:
        search_kwargs["identifier"] = name
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end), order_by=order_by,
                             limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path,
                                                           checksum=md5)
        if md5 is True:
            if len(failed_downloads) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for failed_id in failed_downloads:
                        outfile.write(
                            "%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        # Dry run: list the results instead of downloading.
        for product_id, props in products.items():
            if uuid is None:
                logger.info('Product %s - %s', product_id, props['summary'])
            else:
                # querying uuids has no summary key
                logger.info('Product %s - %s - %s MB', product_id,
                            props['title'],
                            round(int(props['size']) / (1024. * 1024.), 2))
    if uuid is None:
        logger.info('---')
        logger.info('%s scenes found with a total size of %.2f GB',
                    len(products), api.get_products_size(products))
# download single scene by known product id
# api.download(<product_id>)

# search by polygon, time, and SciHub query keywords
# products = api.query(,'20151219', date(2015, 12, 29), platformname = 'Sentinel-2', cloudcoverpercentage = '[0 TO 30]'})
# products = api.query(get_coordinates('map.geojson'),
# NOTE(review): `api` is created earlier in this script, outside this chunk.
footprint = geojson_to_wkt(read_geojson('map.geojson'))
products = api.query(footprint, date=('NOW-48HOURS', 'NOW'),
                     platformname='Sentinel-3', processinglevel="1")
# print(products)

# api.get_products_size(products)
print("PRODUCT SIZE: " + str(api.get_products_size(products)))

# download all results from the search
# api.download_all(products)

# GeoJSON FeatureCollection containing footprints and metadata of the scenes
fp = api.to_geojson(products)
# print(fp)
with open('footprints.json', 'w') as outfile:
    # json.dump({'numbers':n, 'strings':s, 'x':x, 'y':y}, outfile, indent=4)
    json.dump(fp, outfile, indent=4)

i = 0