def test_footprints_s2():
    """Verify that the Sentinel-2 search footprints match the recorded fixture."""
    api = SentinelAPI(**_api_auth)
    products = api.query(get_coordinates('tests/map.geojson'),
                         "20151219", "20151228",
                         platformname="Sentinel-2")

    with open('tests/expected_search_footprints_s2.geojson', 'r') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())

    # BUG FIX: set() over a FeatureCollection (a dict subclass) yields only its
    # KEYS ({'type', 'features'}), so the old assertion compared keys and was
    # always true. Compare the actual features, ignoring order, by sorting each
    # list on a canonical JSON serialization.
    import json

    def _canonical(feature):
        return json.dumps(feature, sort_keys=True)

    assert (sorted(api.to_geojson(products)["features"], key=_canonical)
            == sorted(expected_footprints["features"], key=_canonical))
def test_footprints_s1():
    """Verify that the Sentinel-1 GRD search footprints match the recorded fixture."""
    api = SentinelAPI(**_api_auth)
    products = api.query(get_coordinates('tests/map.geojson'),
                         datetime(2014, 10, 10), datetime(2014, 12, 31),
                         producttype="GRD")

    with open('tests/expected_search_footprints_s1.geojson', 'r') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())

    # BUG FIX: set() over a FeatureCollection (a dict subclass) yields only its
    # KEYS ({'type', 'features'}), so the old assertion compared keys and was
    # always true. Compare the actual features, ignoring order, by sorting each
    # list on a canonical JSON serialization.
    import json

    def _canonical(feature):
        return json.dumps(feature, sort_keys=True)

    assert (sorted(api.to_geojson(products)["features"], key=_canonical)
            == sorted(expected_footprints["features"], key=_canonical))
def search(
        user, password, geojson, start, end, download, md5, sentinel1,
        sentinel2, cloud, footprints, path, query, url):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """
    api = SentinelAPI(user, password, url)

    # Build the search keywords from the CLI flags.
    search_kwargs = {}
    if cloud:
        search_kwargs.update(
            {"platformname": "Sentinel-2",
             "cloudcoverpercentage": "[0 TO %s]" % cloud})
    elif sentinel2:
        search_kwargs.update({"platformname": "Sentinel-2"})
    elif sentinel1:
        search_kwargs.update({"platformname": "Sentinel-1"})
    if query is not None:
        # BUG FIX: split each key=value pair only on the FIRST '=' so values
        # may themselves contain '=' (the old dict([i.split('=') ...]) raised
        # ValueError in that case).
        search_kwargs.update(dict(i.split('=', 1) for i in query.split(',')))

    products = api.query(get_coordinates(geojson), start, end, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        result = api.download_all(products, path, checksum=md5)
        if md5 is True:
            # BUG FIX: the comprehension variable used to be named `path`,
            # shadowing the output-directory parameter of the same name.
            corrupt_scenes = [(fpath, info["id"])
                              for fpath, info in result.items()
                              if info is not None]
            if len(corrupt_scenes) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for corrupt_tuple in corrupt_scenes:
                        outfile.write("%s : %s\n" % corrupt_tuple)
    else:
        # Search-only mode: just report what was found.
        for product in products:
            logger.info('Product %s - %s' % (product['id'], product['summary']))
        logger.info('---')
        logger.info(
            '%s scenes found with a total size of %.2f GB' %
            (len(products), api.get_products_size(products)))
def cli(user, password, geometry, start, end, uuid, name, download, md5,
        sentinel, producttype, instrument, cloud, footprints, path, query, url,
        order_by, limit):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your Copernicus Open Access Hub user and password, you must pass a
    geojson file containing the geometry of the area you want to search for
    or the UUIDs of the products. If you don't specify the start and end dates,
    it will search in the last 24 hours.
    """
    _set_logger_handler()
    api = SentinelAPI(user, password, url)

    # Build search keywords from the CLI flags.
    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel
    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument
    if producttype:
        search_kwargs["producttype"] = producttype
    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs["cloudcoverpercentage"] = (0, cloud)
    if query is not None:
        # BUG FIX: split only on the FIRST '=' so values may contain '='.
        search_kwargs.update((x.split('=', 1) for x in query.split(',')))
    if geometry is not None:
        search_kwargs['area'] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        # Direct lookup of the given product UUIDs instead of a search.
        uuid_list = [x.strip() for x in uuid.split(',')]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if 'Invalid key' in e.msg:
                    logger.error('No product with ID \'%s\' exists on server', productid)
                else:
                    # BUG FIX: previously any other server error was silently
                    # swallowed; re-raise so real failures are not hidden.
                    raise
    elif name is not None:
        search_kwargs["identifier"] = name
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by, limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path, checksum=md5)
        if md5 is True:
            if len(failed_downloads) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for failed_id in failed_downloads:
                        outfile.write("%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        # Search-only mode: report what was found.
        for product_id, props in products.items():
            if uuid is None:
                logger.info('Product %s - %s', product_id, props['summary'])
            else:  # querying uuids has no summary key
                logger.info('Product %s - %s - %s MB', product_id, props['title'],
                            round(int(props['size']) / (1024. * 1024.), 2))
        if uuid is None:
            logger.info('---')
            logger.info('%s scenes found with a total size of %.2f GB',
                        len(products), api.get_products_size(products))
def search(user, password, geojson, start, end, download, md5, sentinel,
           producttype, instrument, sentinel1, sentinel2, cloud, footprints,
           path, query, url):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """
    api = SentinelAPI(user, password, url)

    # Build search keywords from the CLI flags.
    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs.update({"platformname": "Sentinel-" + sentinel})
    if instrument and not producttype:
        search_kwargs.update({"instrumentshortname": instrument})
    if producttype:
        search_kwargs.update({"producttype": producttype})

    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError(
                'Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs.update({"cloudcoverpercentage": "[0 TO %s]" % cloud})
    # DEPRECATED: to be removed with next major release
    elif sentinel2:
        search_kwargs.update({"platformname": "Sentinel-2"})
        logger.info('DEPRECATED: Please use --sentinel instead')
    # DEPRECATED: to be removed with next major release
    elif sentinel1:
        search_kwargs.update({"platformname": "Sentinel-1"})
        logger.info('DEPRECATED: Please use --sentinel instead')

    if query is not None:
        # BUG FIX: split each key=value pair only on the FIRST '=' so values
        # may themselves contain '=' (the old dict([i.split('=') ...]) raised
        # ValueError in that case).
        search_kwargs.update(dict(i.split('=', 1) for i in query.split(',')))

    wkt = geojson_to_wkt(read_geojson(geojson))
    products = api.query(wkt, start, end, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path, checksum=md5)
        if md5 is True:
            if len(failed_downloads) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for failed_id in failed_downloads:
                        outfile.write(
                            "%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        # Search-only mode: report what was found.
        for product_id, props in products.items():
            logger.info('Product %s - %s' % (product_id, props['summary']))
        logger.info('---')
        logger.info('%s scenes found with a total size of %.2f GB' %
                    (len(products), api.get_products_size(products)))
def cli(
    user, password, geometry, start, end, uuid, name, download,
    sentinel, producttype, instrument, cloud, footprints, path, query,
    url, order_by, limit,
):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your Copernicus Open Access Hub user and password, you must pass a
    geojson file containing the geometry of the area you want to search for
    or the UUIDs of the products. If you don't specify the start and end dates,
    it will search in the last 24 hours.
    """
    _set_logger_handler()

    # Fall back to ~/.netrc credentials when --user/--password were not given.
    if user is None or password is None:
        try:
            user, password = requests.utils.get_netrc_auth(url)
        except TypeError:
            pass
    if user is None or password is None:
        raise click.UsageError(
            "Missing --user and --password. Please see docs "
            "for environment variables and .netrc support.")

    api = SentinelAPI(user, password, url)

    # Build search keywords from the CLI flags.
    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel
    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument
    if producttype:
        search_kwargs["producttype"] = producttype
    if cloud:
        if sentinel not in ["2", "3"]:
            logger.error("Cloud cover is only supported for Sentinel 2 and 3.")
            exit(1)
        search_kwargs["cloudcoverpercentage"] = (0, cloud)
    if query is not None:
        # BUG FIX: split only on the FIRST '=' so values may contain '='
        # (e.g. raw OData subqueries).
        search_kwargs.update((x.split("=", 1) for x in query))
    if geometry is not None:
        search_kwargs["area"] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        # Direct lookup of the given product UUIDs instead of a search.
        uuid_list = [x.strip() for x in uuid]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if "Invalid key" in e.msg:
                    logger.error("No product with ID '%s' exists on server", productid)
                    exit(1)
                else:
                    raise
    elif name is not None:
        # Multiple --name values are OR-combined into a single identifier query.
        search_kwargs["identifier"] = (
            name[0] if len(name) == 1 else "(" + " OR ".join(name) + ")")
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by, limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        # NOTE(review): `footprints` is the literal True inside this branch, so
        # os.path.isdir(footprints) below cannot name a directory as written —
        # looks like --footprints was meant to optionally carry a directory
        # value; confirm the intended option semantics.
        if os.path.isdir(footprints):
            foot_path = os.path.join(footprints, "search_footprints.geojson")
        else:
            foot_path = "search_footprints.geojson"
        if path == ".":
            dump_path = os.path.join(os.getcwd(), foot_path)
        else:
            dump_path = os.path.join(path, foot_path)
        with open(dump_path, "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, triggered, failed_downloads = api.download_all(products, path)
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write("%s : %s\n" % (failed_id, products[failed_id]["title"]))
    else:
        # Search-only mode: report what was found.
        for product_id, props in products.items():
            if uuid is None:
                logger.info("Product %s - %s", product_id, props["summary"])
            else:  # querying uuids has no summary key
                logger.info(
                    "Product %s - %s - %s MB",
                    product_id,
                    props["title"],
                    round(int(props["size"]) / (1024.0 * 1024.0), 2),
                )
        if uuid is None:
            logger.info("---")
            logger.info(
                "%s scenes found with a total size of %.2f GB",
                len(products),
                api.get_products_size(products),
            )
# save the footprints of the scenes marked for download together with their
# metadata in a Geojson file

# first, run a new query to get the metadata for the selected scenes
products_n = OrderedDict()
for uuid in products_df_n['uuid']:
    kw = query_kwargs.copy()
    kw['uuid'] = uuid
    pp = api.query(**kw)
    products_n.update(pp)

# then, write the footprints and metadata to a geojson file
os.chdir(wd)  # change to the working directory
outfile = 'footprints.geojson'
with open(outfile, 'w') as f:
    json.dump(api.to_geojson(products_n), f)

###############################################
# unzip the downloaded files
###############################################

# get list of all zip files in the data directory
os.chdir(datadir)  # change to the data directory
allfiles = [f for f in listdir(datadir) if isfile(join(datadir, f))]

# unzip all files
for index, filename in enumerate(allfiles, start=1):
    # BUG FIX: the old check `filename.split(".")[1] == "zip"` looked at the
    # FIRST dot-separated token, so archives whose names contain more than one
    # dot (e.g. "S2A_....SAFE.zip") were never unzipped; test the real suffix.
    if filename.endswith(".zip"):
        print("Unzipping file ", index, ": ", filename)
        # NOTE(review): shelling out with string concatenation breaks on
        # filenames with spaces/metacharacters; consider zipfile or
        # subprocess.run(["unzip", filename]).
        os.system("unzip " + filename)
        # remove zip file after extraction
        os.remove(filename)
class Sentinel1(Datasource):
    """Datasource adapter that searches Sentinel-1 products on the Copernicus
    SciHub and converts the results into STAC items."""

    tags = ['SAR', 'Satellite', 'Raster']

    def __init__(self, manifest):
        # Credentials are read from the environment; None values are passed
        # through to SentinelAPI if the variables are unset.
        super().__init__(manifest)
        self.api = SentinelAPI(os.getenv('COPERNICUS_USER'), os.getenv('COPERNICUS_PASSWORD'))
        self.api.api_url = "https://scihub.copernicus.eu/dhus/"

    def search(self, spatial, temporal=None, properties=None, limit=10, **kwargs):
        """Register a Sentinel-1 search (does not execute it immediately).

        Builds a SciHub query body from the STAC-style spatial/temporal inputs
        and appends [self, query_body] to the manifest's search queue.
        """
        stac_query = STACQuery(spatial, temporal)
        query_body = {'area': stac_query.wkt(),
                      'limit': limit,
                      'platformname': 'Sentinel-1',
                      }
        if temporal:
            query_body.update({'date': stac_query.temporal})
        if properties:
            # Translate each requested STAC property into its SciHub API
            # keyword(s) via the stac_to_api mapping of converter callables.
            api_props = {}
            for prop in properties:
                api_props.update(stac_to_api[prop](properties))
            query_body.update(api_props)
        self.manifest.searches.append([self,query_body])

    def execute(self, query):
        """Run a previously registered query and return a list of STAC items.

        Each SciHub footprint feature is normalized (MultiPolygon collapsed to
        its first Polygon), given a bbox and an estimated UTM EPSG code, and
        its properties are re-mapped to STAC property names.
        """
        # An 'epsg' entry in the query is a client-side filter, not a SciHub
        # keyword — pop it before querying.
        epsg_check = query.pop('epsg') if 'epsg' in list(query) else None
        products = self.api.query(**query)
        response = self.api.to_geojson(products)
        stac_items = []
        for feat in response['features']:
            stac_props = {}
            # Calculate bbox from coords
            if feat['geometry']['type'] == 'MultiPolygon':
                # NOTE(review): only the first polygon of a MultiPolygon is
                # kept; any additional polygons are dropped — confirm intended.
                xcoords = [x[0] for x in feat['geometry']['coordinates'][0][0]]
                ycoords = [y[1] for y in feat['geometry']['coordinates'][0][0]]
                feat['geometry']['coordinates'] = feat['geometry']['coordinates'][0]
                feat['geometry']['type'] = 'Polygon'
            else:
                xcoords = [x[0] for x in feat['geometry']['coordinates'][0]]
                ycoords = [y[1] for y in feat['geometry']['coordinates'][0]]
            feat.update({"bbox": [min(xcoords), min(ycoords), max(xcoords), max(ycoords)]})

            # Find EPSG of WGS84 UTM zone from centroid of bbox
            # (centroid is [lat, lon]; 327xx for the southern hemisphere,
            # 326xx for the northern).
            centroid = [(feat['bbox'][1] + feat['bbox'][3]) / 2, (feat['bbox'][0] + feat['bbox'][2]) / 2]
            utm_zone = utm.from_latlon(*centroid)
            epsg = '32' + '5' + str(utm_zone[2]) if centroid[0] < 0 else '32' + '6' + str(utm_zone[2])
            stac_props.update({'eo:epsg': int(epsg)})
            if epsg_check:
                if int(epsg) != epsg_check:
                    continue

            # Replace properties with STAC properties
            for prop in feat['properties']:
                if prop in list(api_to_stac):
                    stac_props.update(api_to_stac[prop](feat['properties']))
            feat['properties'] = stac_props

            # Move assets from properties to feature
            feat.update({"assets": {"analytic": feat['properties'].pop("asset_analytic"),
                                    "thumbnail": feat['properties'].pop("asset_thumbnail")}})

            # Update ID
            feat.update({"id": stac_props.pop("id")})

            # Validate STAC item
            STACItem.load(feat)
            stac_items.append(feat)

        return stac_items
def search(
        user, password, geojson, start, end, download, md5, sentinel,
        producttype, instrument, sentinel1, sentinel2, cloud, footprints,
        path, query, url):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """
    api = SentinelAPI(user, password, url)

    # Build search keywords from the CLI flags.
    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs.update({"platformname": "Sentinel-" + sentinel})
    if instrument and not producttype:
        search_kwargs.update({"instrumentshortname": instrument})
    if producttype:
        search_kwargs.update({"producttype": producttype})

    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs.update({"cloudcoverpercentage": "[0 TO %s]" % cloud})
    # DEPRECATED: to be removed with next major release
    elif sentinel2:
        search_kwargs.update({"platformname": "Sentinel-2"})
        logger.info('DEPRECATED: Please use --sentinel instead')
    # DEPRECATED: to be removed with next major release
    elif sentinel1:
        search_kwargs.update({"platformname": "Sentinel-1"})
        logger.info('DEPRECATED: Please use --sentinel instead')

    if query is not None:
        # BUG FIX: split each key=value pair only on the FIRST '=' so values
        # may themselves contain '=' (the old dict([i.split('=') ...]) raised
        # ValueError in that case).
        search_kwargs.update(dict(i.split('=', 1) for i in query.split(',')))

    wkt = geojson_to_wkt(read_geojson(geojson))
    products = api.query(wkt, start, end, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path, checksum=md5)
        if md5 is True:
            if len(failed_downloads) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for failed_id in failed_downloads:
                        outfile.write("%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        # Search-only mode: report what was found.
        for product_id, props in products.items():
            logger.info('Product %s - %s' % (product_id, props['summary']))
        logger.info('---')
        logger.info(
            '%s scenes found with a total size of %.2f GB' %
            (len(products), api.get_products_size(products)))
def cli(
    user, password, geometry, start, end, uuid, name, download, quicklook,
    sentinel, producttype, instrument, cloud, footprints, path, query, url,
    order_by, location, limit, info,
):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your Copernicus Open Access Hub user and password, you must pass a
    geojson file containing the geometry of the area you want to search for
    or the UUIDs of the products. If you don't specify the start and end dates,
    it will search in the last 24 hours.
    """
    _set_logger_handler()

    # Fall back to ~/.netrc credentials when --user/--password were not given.
    if user is None or password is None:
        try:
            user, password = requests.utils.get_netrc_auth(url)
        except TypeError:
            pass
    if user is None or password is None:
        raise click.UsageError(
            "Missing --user and --password. Please see docs "
            "for environment variables and .netrc support.")

    api = SentinelAPI(user, password, url)

    if info:
        # --info only reports the hub version and exits.
        ctx = click.get_current_context()
        click.echo("DHuS version: " + api.dhus_version)
        ctx.exit()

    # Build search keywords from the CLI flags.
    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel
    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument
    if producttype:
        search_kwargs["producttype"] = producttype
    if cloud:
        if sentinel not in ["2", "3"]:
            logger.error("Cloud cover is only supported for Sentinel 2 and 3.")
            exit(1)
        search_kwargs["cloudcoverpercentage"] = (0, cloud)
    if query is not None:
        # BUG FIX: split only on the FIRST '=' so values may contain '='.
        search_kwargs.update((x.split("=", 1) for x in query))

    if location is not None:
        # BUG FIX: the result used to be unpacked into `info`, silently
        # shadowing the --info flag parameter; use a dedicated local name.
        wkt, place_info = placename_to_wkt(location)
        minX, minY, maxX, maxY = place_info["bbox"]
        r = 6371  # average Earth radius, km
        extent_east = r * math.radians(maxX - minX) * math.cos(math.radians((minY + maxY) / 2))
        extent_north = r * math.radians(maxY - minY)
        logger.info(
            "Querying location: '%s' with %.1f x %.1f km, %f, %f to %f, %f bounding box",
            place_info["display_name"],
            extent_north,
            extent_east,
            minY,
            minX,
            maxY,
            maxX,
        )
        search_kwargs["area"] = wkt

    if geometry is not None:
        # check if the value is an existing path
        if os.path.exists(geometry):
            search_kwargs["area"] = geojson_to_wkt(read_geojson(geometry))
        # check if the value is a GeoJSON
        else:
            if geometry.startswith("{"):
                try:
                    geometry = json.loads(geometry)
                    search_kwargs["area"] = geojson_to_wkt(geometry)
                except json_parse_exception:
                    raise click.UsageError(
                        "geometry string starts with '{' but is not a valid GeoJSON."
                    )
            # check if the value is a WKT
            elif is_wkt(geometry):
                search_kwargs["area"] = geometry
            else:
                raise click.UsageError(
                    "The geometry input is neither a GeoJSON file with a valid path, "
                    "a GeoJSON String nor a WKT string.")

    if uuid is not None:
        # Direct lookup of the given product UUIDs instead of a search.
        uuid_list = [x.strip() for x in uuid]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except InvalidKeyError:
                logger.error("No product with ID '%s' exists on server", productid)
                exit(1)
    elif name is not None:
        # Multiple --name values are OR-combined into a single identifier query.
        search_kwargs["identifier"] = (
            name[0] if len(name) == 1 else "(" + " OR ".join(name) + ")")
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by, limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if quicklook:
        downloaded_quicklooks, failed_quicklooks = api.download_all_quicklooks(products, path)
        if failed_quicklooks:
            api.logger.warning("Some quicklooks failed: %s out of %s",
                               len(failed_quicklooks), len(products))

    if download is True:
        product_infos, triggered, failed_downloads = api.download_all(products, path)
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write("%s : %s\n" % (failed_id, products[failed_id]["title"]))
    else:
        # Search-only mode: report what was found.
        for product_id, props in products.items():
            if uuid is None:
                logger.info("Product %s - %s", product_id, props["summary"])
            else:  # querying uuids has no summary key
                logger.info(
                    "Product %s - %s - %s MB",
                    product_id,
                    props["title"],
                    round(int(props["size"]) / (1024.0 * 1024.0), 2),
                )
        if uuid is None:
            logger.info("---")
            logger.info(
                "%s scenes found with a total size of %.2f GB",
                len(products),
                api.get_products_size(products),
            )
# footprint='intersects(22.951, -15.268)' # any scene that intersects a central point lon, lat # search by polygon, time, and Hub query keywords footprint = geojson_to_wkt(read_geojson(jsonAOI)) products = api.query(footprint=footprint, beginposition=beginposition, platformname=platformname, producttype=producttype) # query the product list # items=list(products.items()) # download all results from the search print('Downloading all products...') api.download_all(products) ''' # GeoJSON FeatureCollection containing footprints and metadata of the scenes api.to_geojson(products) # GeoPandas GeoDataFrame with the metadata of the scenes and the footprints as geometries api.to_geopandas(products) # Get basic information about the product: its title, file size, MD5 sum, date, footprint and # its download url api.get_product_odata(<product_id>) # Get the product's full metadata available on the server api.get_product_odata(<product_id>, full=True) '''
print("AOI - "+aoi)
# Output directory name: <project>_<aoi order>_<name>.
directorio = proyecto+"_"+order_aoi+"_"+name
if not os.path.exists("D:\\datos\\Images\\"+directorio):
    os.mkdir("D:\\datos\\Images\\"+directorio)

##############################################################################
# connect to the API
# SECURITY NOTE(review): credentials are hard-coded in source. Move them to
# environment variables or a .netrc file and rotate this password.
api = SentinelAPI('josem.sanchezm.ccaa', 'JMSMjmsm2412', 'https://scihub.copernicus.eu/dhus')

# search by polygon, time, and Hub query keywords
#footprint = geojson_to_wkt(read_geojson(geom))
products = api.query(geom, platformname = 'Sentinel-2', cloudcoverpercentage = (0, 25),producttype='S2MSI1C',beginPosition='[2019-01-01T00:00:00.000Z TO '+year+'-01-01T23:59:59.999Z]')

# NOTE(review): this variable shadows the stdlib `json` module name; any later
# use of the json module in this script would break — consider renaming.
json = api.to_geojson(products)
# Extract per-scene metadata from each footprint feature.
for i in json["features"]:
    title = i['properties']['title']
    summary = i['properties']['summary']
    ingestiondate = i['properties']['ingestiondate']
    beginposition = i['properties']['beginposition']
    endposition = i['properties']['endposition']
    orbitnumber = str(i['properties']['orbitnumber'])
    relativeorbitnumber = str(i['properties']['relativeorbitnumber'])
    cloudcoverpercentage = str(i['properties']['cloudcoverpercentage'])
    filename = i['properties']['filename']
    # NOTE(review): `format` shadows the builtin of the same name.
    format = i['properties']['format']
    identifier = i['properties']['identifier']
    instrumentshortname = i['properties']['instrumentshortname']
    instrumentname = i['properties']['instrumentname']
def cli(user, password, geometry, start, end, uuid, name, download, sentinel,
        producttype, instrument, cloud, footprints, path, query, url,
        order_by, limit):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.

    Beyond your Copernicus Open Access Hub user and password, you must pass a
    geojson file containing the geometry of the area you want to search for
    or the UUIDs of the products. If you don't specify the start and end dates,
    it will search in the last 24 hours.
    """
    _set_logger_handler()
    api = SentinelAPI(user, password, url)

    # Build search keywords from the CLI flags.
    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel
    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument
    if producttype:
        search_kwargs["producttype"] = producttype
    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs["cloudcoverpercentage"] = (0, cloud)
    if query is not None:
        # BUG FIX: split only on the FIRST '=' so values may contain '='.
        search_kwargs.update((x.split('=', 1) for x in query.split(',')))
    if geometry is not None:
        search_kwargs['area'] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        # Direct lookup of the given product UUIDs instead of a search.
        uuid_list = [x.strip() for x in uuid.split(',')]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if 'Invalid key' in e.msg:
                    logger.error('No product with ID \'%s\' exists on server', productid)
                else:
                    # BUG FIX: previously any other server error was silently
                    # swallowed; re-raise so real failures are not hidden.
                    raise
    elif name is not None:
        search_kwargs["identifier"] = name
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by, limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path)
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write("%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        # Search-only mode: report what was found.
        for product_id, props in products.items():
            if uuid is None:
                logger.info('Product %s - %s', product_id, props['summary'])
            else:  # querying uuids has no summary key
                logger.info('Product %s - %s - %s MB', product_id, props['title'],
                            round(int(props['size']) / (1024. * 1024.), 2))
        if uuid is None:
            logger.info('---')
            logger.info('%s scenes found with a total size of %.2f GB',
                        len(products), api.get_products_size(products))
# List the products found, then prepare the output folder and download.
print("ProductId Filename Size")
for prod_id in products:
    print("%s %s %s" % (prod_id, products[prod_id]["filename"], products[prod_id]["size"]))

dir_actual = path = os.getcwd()
dir_dados = dir_actual + "/" + data_str  # str_data_ini
print("A criar pasta: " + dir_dados)
try:
    os.mkdir(dir_dados)
except OSError:
    print("Falha ao criar directoria %s." % dir_dados)
    # sys.exit(0)

# create a footprints.geojson file to use for the transparency mask
oGeoJSON = api.to_geojson(products)
with open(dir_dados + '/search_footprints.geojson', 'w') as outfile:
    json.dump(oGeoJSON, outfile)

# connect to the api
api_session = api.session
api_url = "https://scihub.copernicus.eu/apihub/odata/v1/"

# fetch the quicklooks
api.download_all_quicklooks(products)

# download products using 2 worker threads
# IMPROVEMENT: run the executor as a context manager so its worker threads are
# always joined and the pool is shut down, even if a download raises.
with ThreadPoolExecutor(max_workers=2) as pool:
    for download in pool.map(download_prods_bands, products):
        print(download)