Example #1
def test_footprints_s2(products):
    footprints = SentinelAPI.to_geojson(products)
    for footprint in footprints['features']:
        assert not footprint['geometry'].errors()

    with open(FIXTURES_DIR + '/expected_search_footprints_s2.geojson') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(footprints) == set(expected_footprints)
Example #3
def test_footprints_s2(products, fixture_path):
    footprints = SentinelAPI.to_geojson(products)
    for footprint in footprints["features"]:
        assert not footprint["geometry"].errors()

    with open(fixture_path("expected_search_footprints_s2.geojson")) as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(footprints) == set(expected_footprints)
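Example #3 assumes a fixture_path helper instead of a hard-coded FIXTURES_DIR. A minimal sketch of such a pytest fixture, assuming the fixture files live next to the test module, might look like this:

import os

import pytest

FIXTURES_DIR = os.path.join(os.path.dirname(__file__), "fixtures")

@pytest.fixture
def fixture_path():
    # Resolve a filename relative to the test fixtures directory
    def _fixture_path(filename):
        return os.path.join(FIXTURES_DIR, filename)
    return _fixture_path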
Example #4
def test_footprints_s2(products):
    footprints = SentinelAPI.to_geojson(products)
    for footprint in footprints['features']:
        validation = geojson.is_valid(footprint['geometry'])
        assert validation['valid'] == 'yes', validation['message']

    with open('tests/expected_search_footprints_s2.geojson') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(footprints) == set(expected_footprints)
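Note: geojson.is_valid() is the validation helper from older releases of the python-geojson package; newer releases expose validation on the geometry object itself (the errors() method used in Examples #1 and #3), so this variant only runs against the older API.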
Example #6
def test_footprints_s1():
    api = SentinelAPI(**_api_auth)
    products = api.query(
        geojson_to_wkt(read_geojson(FIXTURES_DIR + '/map.geojson')),
        (datetime(2014, 10, 10), datetime(2014, 12, 31)), producttype="GRD"
    )

    footprints = api.to_geojson(products)
    for footprint in footprints['features']:
        assert not footprint['geometry'].errors()

    with open(FIXTURES_DIR + '/expected_search_footprints_s1.geojson') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(footprints) == set(expected_footprints)
Example #8
def test_footprints_s1():
    api = SentinelAPI(**_api_auth)
    products = api.query(geojson_to_wkt(read_geojson('tests/map.geojson')),
                         datetime(2014, 10, 10),
                         datetime(2014, 12, 31),
                         producttype="GRD")

    footprints = api.to_geojson(products)
    for footprint in footprints['features']:
        validation = geojson.is_valid(footprint['geometry'])
        assert validation['valid'] == 'yes', validation['message']

    with open('tests/expected_search_footprints_s1.geojson') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(footprints) == set(expected_footprints)
Example #10
def main(options):
    api = SentinelAPI('bartulo', 'ventanuco')
    footprint = geojson_to_wkt(read_geojson(options.geojson))
    products = api.query( footprint, 
                          date = ( date.today() - timedelta(options.days), date.today() + timedelta(1)),
                          producttype = 'S2MSI2A',
                          platformname = 'Sentinel-2')

    data = api.to_geojson(products)['features']

    question_name = 'capa'
    questions = [
            inquirer.List(
                question_name,
                message = 'Imagenes disponibles',
                choices = ["Id: %s - Fecha: %s - Cobertura de nubes:%.4s%%" % (i, data[i]['properties']['beginposition'], data[i]['properties']['cloudcoverpercentage']) for i in range(len(data))],
                ),
            ]

    answers = inquirer.prompt(questions)
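    # Recover the numeric product index from the selected "Id: N - ..." menu entry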
    index = int(answers[question_name].split('-')[0].split(':')[1].replace(' ', ''))
    baseURL = data[index]['properties']['link_alternative']
    filename = data[index]['properties']['filename']
                
    api_session = requests.Session()
    api_session.auth = ('bartulo', 'ventanuco')
    granules = api_session.get("%s/Nodes('%s')/Nodes('GRANULE')/Nodes?$format=json" % (baseURL, filename)).json()
    granules_id = granules['d']['results'][0]['Id']
    print("%s/Nodes('%s')/Nodes('GRANULE')/Nodes?$format=json" % (baseURL, filename))

    bands_10m = api_session.get("%s/Nodes('%s')/Nodes('GRANULE')/Nodes('%s')/Nodes('IMG_DATA')/Nodes('R10m')/Nodes?$format=json" % (baseURL, filename, granules_id)).json()
    band8 = bands_10m['d']['results'][4]['__metadata']['media_src']
    bandColor = bands_10m['d']['results'][5]['__metadata']['media_src']

    bands_20m = api_session.get("%s/Nodes('%s')/Nodes('GRANULE')/Nodes('%s')/Nodes('IMG_DATA')/Nodes('R20m')/Nodes?$format=json" % (baseURL, filename, granules_id)).json()
    band12 = bands_20m['d']['results'][8]['__metadata']['media_src']
    print("%s/Nodes('%s')/Nodes('GRANULE')/Nodes('%s')/Nodes('IMG_DATA')/Nodes('R20m')/Nodes?$format=json" % (baseURL, filename, granules_id))

#    download(band12, 'banda12.jp2', api_session)
#    download(band8, 'banda8.jp2', api_session)
#    download(bandColor, 'color.jp2', api_session)

    banda8 = gdal.Open('banda8.jp2')
    banda12 = gdal.Open('banda12.jp2')

    b8 = banda8.ReadAsArray()
    b12 = banda12.ReadAsArray()
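The commented-out calls above use a download helper that is not shown. A minimal sketch of such a helper, streaming each band to disk through the authenticated session (name and signature are assumptions), could be:

def download(url, filename, session, chunk_size=8192):
    # Stream the band image to disk through the authenticated session
    response = session.get(url, stream=True)
    response.raise_for_status()
    with open(filename, 'wb') as output:
        for chunk in response.iter_content(chunk_size=chunk_size):
            output.write(chunk)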
Example #11
class Downloader:
    def __init__(self, str_username, str_password, str_link):
        self.api = SentinelAPI(str_username, str_password, str_link)
        self.products = None

    def search_polygon(self, footprint: object, str_date_start: str,
                       str_date_end: str, str_platform_name: str, percentage: object):
        print('searching')
        self.products = self.api.query(footprint,
                                       date=(str_date_start, str_date_end),
                                       platformname=str_platform_name,
                                       cloudcoverpercentage=(percentage[0], percentage[1]))
        size = self.api.get_products_size(self.products)
        print(f'found {size} GB of data')
        # print(self.products)

    def download_zip(self, path):
        self.api.download_all(self.products, path, max_attempt, True)  # max_attempt is assumed to be defined at module level

    def download_products(self, path, download_file):
        if download_file:
            self.download_zip(path)
        print('downloaded')
        df_products = self.api.to_dataframe(self.products)
        return df_products

    def download_geoproduct(self, path, download_file):
        if download_file:
            self.download_zip(path)
        # print('download Geos')
        gdf_products = self.api.to_geodataframe(self.products)
        return gdf_products

    def download_json(self):
        return self.api.to_geojson(self.products)

    def download_one(self, key, path):
        self.api.download(key, path, True)
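A hypothetical usage sketch for the class above; the credentials, GeoJSON file, dates, and cloud-cover bounds are placeholders:

from sentinelsat import geojson_to_wkt, read_geojson

downloader = Downloader('user', 'password', 'https://scihub.copernicus.eu/dhus')
footprint = geojson_to_wkt(read_geojson('area.geojson'))
downloader.search_polygon(footprint, '20200101', '20200131', 'Sentinel-2', (0, 30))
df = downloader.download_products('downloads/', download_file=False)
print(df.head())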
Example #12
    def getMetaData(self, year, month, day):

        try:
            _date = datetime(int(year), int(month), int(day))
            api = SentinelAPI('USERNAME', 'PASSWORD',
                              'https://scihub.copernicus.eu/dhus')
            footprint = geojson_to_wkt(read_geojson('goldmine_sa.geojson'))
            products = api.query(footprint,
                                 date=(_date.strftime('%Y%m%d'),
                                       (_date +
                                        timedelta(days=1)).strftime('%Y%m%d')),
                                 platformname='Sentinel-1',
                                 producttype='GRD')

            fc = api.to_geojson(products)
            features = fc['features']
            for items in features:
                # fn is assumed to be defined elsewhere (the target product filename)
                if json.dumps(items['properties']['filename']).replace(
                        '"', '') == fn:
                    metaData = items
            return metaData
        except Exception:
            print('could not retrieve metadata')
            exit()
Example #13
import os

from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt
mypath = r'C:\Users\mccoy\PycharmProjects\untitled\gz_2010_us_outline_500k.json'
# products = api.query(footprint,
#                      producttype='SLC',
#                      orbitdirection='ASCENDING')
# api.download_all(products)

testAPI = SentinelAPI(user='******', password='******', api_url='https://s5phub.copernicus.eu/dhus')
footprint = geojson_to_wkt(read_geojson(mypath))
#products = testAPI.query(area = footprint, date = "[NOW-20DAYS TO NOW]", platformname='Sentinel-5p')

#Pollution map for the US
products = testAPI.query(area = footprint, date = "[NOW-1DAYS TO NOW]",producttype='L2__NO2___')

# download all results from the search
mypath = testAPI.download_all(products)
downloadedfile = mypath[0][next(iter(mypath[0]))]['path']
dir_path = os.path.dirname(os.path.realpath(__file__))
downloadedfile_full = os.path.join(dir_path, downloadedfile)

# GeoJSON FeatureCollection containing footprints and metadata of the scenes
testAPI.to_geojson(products)

# GeoPandas GeoDataFrame with the metadata of the scenes and the footprints as geometries
testAPI.to_geodataframe(products)

#Get all data for the whole world
#products = testAPI.query(date = "[NOW-1DAYS TO NOW]")


print('x')
Example #14
def apirequest(year="2017"):
    full_path = os.getcwd()
    os.chdir(Path(full_path).parents[0])
    downloadfolder = r"Produtos_Sentinel"
    try:
        if not os.path.exists(downloadfolder):
            os.makedirs(downloadfolder)
    except OSError:
        print("Erro: Criar Pasta " + downloadfolder)

    api = SentinelAPI('pfmacf', '4mcTUobqdf',
                      'https://scihub.copernicus.eu/dhus')
    path = "Dataset" + str(year)
    my_dir_path = Path(path)
    if not my_dir_path.is_dir():
        switcherfunc = {
            0: convertshapeepsg,
            1: createindividualshp,
            2: datatxt,
            3: getgeojson
        }
        for func in switcherfunc:
            _func_exec = switcherfunc[func](year)

    num_sub_folders = [name for name in os.listdir(path)]
    json_folder = check_jsonstats_folder("JsonApis")
    index_filename = "LastIndex_SentinelApi" + " " + str(year) + ".json"
    jpath = os.path.join(json_folder, index_filename)
    jsonstring = "Last Iteration" + " " + str(year)
    unique_txt = r"Unique_Products.txt"
    open(unique_txt, 'a').close()
    contador_vazios = 0
    my_file_index = Path(index_filename)

    if my_file_index.is_file():

        with open(index_filename, "r") as file:
            data = file.read()

        file_dict = json.loads(data)

        index = file_dict[jsonstring]

    else:
        index = 0

    for i in range(index, len(num_sub_folders)):

        pathfinal = os.path.join(path, str(i))
        pathtxt = os.path.join(pathfinal, "Data.txt")
        pathgeojson = os.path.join(pathfinal, "bbox4326.geojson")

        data = linecache.getline(pathtxt, 2)
        date_fire = data.rstrip('\n').split(" ")

        ano = int(date_fire[2].split("-")[0])
        mes = int(date_fire[2].split("-")[1])
        dia = int(date_fire[2].split("-")[2])
        search_date = [ano, mes, dia]

        products = get_footprint(api, pathgeojson, search_date, 14)
        print('\nNumero de produtos encontrados = ', (len(products)))
        print('\nIndex = ', i)
        if not len(products):
            products = get_footprint(api, pathgeojson, search_date, 30)
            if not len(products):
                products = get_footprint(api, pathgeojson, search_date, 60)
                if not len(products):
                    filename = "Sem_Produtos.txt"
                    filepath = os.path.join(pathfinal, filename)
                    write_to_file(filepath, mode="w", text="Invalido")
                    contador_vazios += 1
                    continue

        # convert to DataFrame
        products_df = api.to_dataframe(products)
        products_geojson = api.to_geojson(products)
        valid_list = validate_footprints(products_geojson, pathfinal, i, year)

        if valid_list:

            product_id, product_title = get_final_product(
                valid_list, products_df, pathfinal)
            product_info = api.get_product_odata(product_id)
            file = open(unique_txt, "r+", encoding="utf-8")

            line_found = any(product_title in line for line in file)
            if not line_found:
                write_to_file(unique_txt, mode="a", text=product_title)
                open("Products_to_Download.txt", 'a').close()
                check_availability_download(product_info, product_title,
                                            product_id, api, downloadfolder)
                file.close()
            else:
                file.close()

        else:
            filename = "Sem_Produtos.txt"
            filepath = os.path.join(pathfinal, filename)
            write_to_file(filepath, mode="w", text="Invalido")
            contador_vazios += 1

        json_dict = {jsonstring: i}
        json_dict_exception = {jsonstring: 0}

        if i < len(num_sub_folders) - 1:
            with open(jpath, 'w') as output:
                json.dump(json_dict, output, indent=4)
        else:
            with open(jpath, 'w') as output:
                json.dump(json_dict_exception, output, indent=4)

    print("Contagem de incendios sem Produto: ", contador_vazios)
Example #15
class Downloader(object):
    def __init__(self,
                 username,
                 password,
                 satellite,
                 order_id,
                 directory=Path('/data/')):
        # The connection to ESA scihub
        self.api = SentinelAPI(username,
                               password,
                               'https://scihub.copernicus.eu/dhus',
                               timeout=500.00)

        # Sentinel-5p currently has its own pre-operations hub
        self.api_s5p = SentinelAPI(user='******',
                                   password='******',
                                   api_url='https://s5phub.copernicus.eu/dhus')

        # Use the current datetime to name the download order
        self.order_id = order_id

        # Use an ordered dict to store the metadata of the queried products
        self.products = OrderedDict()

        self.satellite = satellite

        self.directory = directory
        # if not self.directory.exists():  # Create directory if it does not exist
        #     os.makedirs(self.directory)

    def query(self, footprint, startdate, enddate):
        if self.satellite == 's1' or self.satellite == 'all':
            self.query_s1(footprint, startdate, enddate)
        if self.satellite == 's2' or self.satellite == 'all':
            self.query_s2(footprint, startdate, enddate)
        if self.satellite == 's3' or self.satellite == 'all':
            self.query_s3(footprint, startdate, enddate)
        if self.satellite == 's5p' or self.satellite == 'all':
            self.query_s5p(footprint, startdate, enddate)

    def query_s1(self, footprint, startdate, enddate):
        # Define producttypes (here it is Sentinel-1 GRDH products)
        producttypes = ['GRD']

        # Loop over producttypes and update the query dictionary
        # TODO: Fix this inefficient way of querying the relative orbits
        print(str(footprint))
        if FLAGS.s2_intersection:
            for producttype in producttypes:
                queried_products = self.api.query(
                    area=footprint,
                    date=(startdate, enddate),
                    platformname='Sentinel-1',
                    #area_relation='Contains',
                    producttype=producttype,
                    sensoroperationalmode='IW',
                    polarisationmode='VV VH')
                self.products.update(queried_products)
                self.intersect_products()
        elif FLAGS.s1_relative_orbit == [0]:
            for producttype in producttypes:
                queried_products = self.api.query(
                    area=footprint,
                    date=(startdate, enddate),
                    platformname='Sentinel-1',
                    #area_relation='Contains',
                    producttype=producttype,
                    sensoroperationalmode='IW',
                    polarisationmode='VV VH')
                self.products.update(queried_products)

        else:
            for producttype in producttypes:
                for relative_orbit in FLAGS.s1_relative_orbit:
                    queried_products = self.api.query(
                        area=footprint,
                        date=(startdate, enddate),
                        platformname='Sentinel-1',
                        producttype=producttype,
                        #area_relation='Contains',
                        sensoroperationalmode='IW',
                        relativeorbitnumber=relative_orbit)
                    self.products.update(queried_products)

    def query_s2(self, footprint, startdate, enddate):
        # Load parameters from FLAGS
        max_cloudcoverage = FLAGS.s2_max_cloudcoverage

        # Define producttypes (here it is Sentinel-2 L2A products)
        producttypes = [
            'S2MSI2Ap', 'S2MSI2A'
        ]  # Producttype names differ depending on the year they were published

        # Loop over producttypes and update the query dictionary
        # TODO: Fix this inefficient way of querying the relative orbits
        if FLAGS.s2_relative_orbit == [0]:
            for producttype in producttypes:
                queried_products = self.api.query(
                    footprint,
                    date=(startdate, enddate),
                    platformname='Sentinel-2',
                    producttype=producttype,
                    cloudcoverpercentage=(0, max_cloudcoverage),
                    order_by='-ingestiondate')
                self.only_complete_tile(queried_products)
                self.products.update(queried_products)

        else:
            for producttype in producttypes:
                for relative_orbit in FLAGS.s2_relative_orbit:
                    queried_products = self.api.query(
                        footprint,
                        date=(startdate, enddate),
                        platformname='Sentinel-2',
                        relativeorbitnumber=relative_orbit,
                        producttype=producttype,
                        cloudcoverpercentage=(0, max_cloudcoverage))
                    self.only_complete_tile(queried_products)
                    self.products.update(queried_products)

    def query_s3(self, footprint, startdate, enddate):
        queried_products = self.api.query(footprint,
                                          date=(startdate, enddate),
                                          platformname='Sentinel-3',
                                          producttype='SL_2_LST___',
                                          productlevel='L2')

        self.products.update(queried_products)

    def query_s5p(self, footprint, startdate, enddate):
        kwargs = {}
        producttypedescriptions = [
            'Ozone', 'Sulphur Dioxide', 'Nitrogen Dioxide', 'Methane',
            'Formaldehyde', 'Carbon Monoxide', 'Aerosol Index',
            'Aerosol Layer Height', 'Cloud'
        ]
        # producttypedescriptions = ['Ozone']

        # Loop over producttypes and update the query dictionary
        for producttypedescription in producttypedescriptions:
            queried_products = self.api_s5p.query(
                footprint,
                date=(startdate, enddate),
                platformname='Sentinel-5 Precursor',
                processinglevel='L2',
                producttypedescription=producttypedescription,
                **kwargs)
            # Remove any 'Suomi-NPP VIIRS Clouds' products which are returned as 'Cloud' (they shouldn't have been)
            # https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-5p/products-algorithms
            if producttypedescription == 'Cloud':
                temp_queried_products = queried_products.copy()
                for key in queried_products.keys():
                    if queried_products[key][
                            'producttypedescription'] != 'Cloud':
                        del temp_queried_products[key]
                queried_products = temp_queried_products
            self.products.update(queried_products)

    def print_num_and_size_of_products(self):
        logging.info('Number of products = ' + str(len(list(self.products))))
        logging.info('Total size [GB] = ' +
                     str(self.api.get_products_size(self.products)))

    # https://sentinelsat.readthedocs.io/en/master/api.html#lta-products
    # TODO: Get LTA retrieval to work properly (install of newest sentinelsat version is in dockerfile)
    # Retry every 30 min (+10 second buffertime) to request LTA products.
    @tenacity.retry(stop=tenacity.stop_after_attempt(200),
                    wait=tenacity.wait_fixed(1810))
    def download_zipfiles(self):
        zipfiles_directory = self.directory / 'zipfiles'
        if len(self.products) == 0:
            logging.info('Unable to find any products for the selected biome')
            sys.exit(0)
            return
        if not zipfiles_directory.exists(
        ):  # Create directory if it does not exist
            os.makedirs(zipfiles_directory)
        # Get the products to be downloaded. The sample() function permutes the dataframe so that a new LTA
        # product is requested at every retry. The optimal solution would have been to rearrange the dataframe by
        # rotating the index at every retry, but this is a quick and dirty way to achieve something similar.
        # (https://stackoverflow.com/a/34879805/12045808).
        products_df = self.queried_products_as_df().sample(frac=1)

        # NOTE: The code below is only useful while the Sentinel-5p has a different api than the others. After this has
        #       been fixed, the code should be reduced to the following single line:
        # Download all zipfiles (it automatically checks if zipfiles already exist)
        # self.api.download_all(self.products, directory_path=zipfiles_directory)  # Download all zipfiles
        # But for now, use the following code:
        non_s5p_products = products_df[
            products_df['platformname'] != 'Sentinel-5 Precursor']
        s5p_products = products_df[products_df['platformname'] ==
                                   'Sentinel-5 Precursor']

        if len(non_s5p_products):
            logging.info("Downloading Sentinel-1/2/3 products")
            try:
                downloaded, triggered, failed = self.api.download_all(
                    non_s5p_products.to_dict(into=OrderedDict, orient='index'),
                    directory_path=zipfiles_directory)
                logging.info("Downloaded: " + str(downloaded))
                logging.info("Triggered: " + str(triggered))
                logging.info("failed: " + str(failed))
            except InvalidChecksumError:
                logging.info("Error downloading products due to CheckSumError")
            except Exception:
                logging.info("Error downloading products due to unkown error")
        else:
            logging.info("No Sentinel-1/2/3 products found in query")

        if len(s5p_products):
            logging.info("Downloading Sentinel-5p products")
            self.api_s5p.download_all(s5p_products.to_dict(into=OrderedDict,
                                                           orient='index'),
                                      directory_path=zipfiles_directory)
        else:
            logging.info("No Sentinel-5p products found in query")

        # The Sentinel-5p data has wrongly been given the filetype .zip, but it should be .nc, so make a copy with
        # .nc extension. A copy is made instead of renaming so sentinelsat doesn't re-download the file every time
        # it is run.
        s5p_downloaded_files = zipfiles_directory.glob('S5P*.zip')
        logging.debug(
            "Copying downloaded Sentinel-5p files from .zip to .nc (due to a bug in sentinelsat)"
        )
        for file in s5p_downloaded_files:
            if not file.with_suffix('.nc').exists():
                shutil.copy(str(file), str(file.with_suffix('.nc')))

    def queried_products_as_geojson(self):
        return self.api.to_geojson(self.products)

    def only_complete_tile(self, products):
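        # Keep only the first product whose footprint is a plain rectangle
        # (5 coordinate pairs), i.e. presumably a complete tile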
        found_one = False
        delete_list = []
        for i in products:
            local_footprint = products.get(i).get('footprint')
            elements = local_footprint.split(',')
            if len(elements) == 5 and found_one == False:
                found_one = True
                continue
            else:
                delete_list.append(i)
        for i in delete_list:
            del products[i]

    def intersect_products(self):
        print('Found ' + str(len(self.products)) + ' products')
        S2_geojson_path = (self.directory / 'orders' /
                           FLAGS.s2_order_id).with_suffix('.geojson')
        ground_geojsons = read_geojson(S2_geojson_path)
        products_geojsons = self.queried_products_as_geojson()
        ground_polygon = ground_geojsons.get('features')[0].get(
            'geometry').get('coordinates')
        ground_polygon = geometry.Polygon(ground_polygon[0][0])
        titles = []
        ids = []
        for item in products_geojsons.get('features'):
            id = item.get('properties').get('id')
            item = item.get('properties').get('title')
            item = (item[17:25] + item[48:55])
            titles.append(item)
            ids.append([item, id])
        unique = list(set(titles))
        unique.sort()
        union_list = []
        for i, element in enumerate(unique):
            local_polygon = Polygon()
            for j in range(len(titles)):
                if titles[j] == element:
                    item = products_geojsons.get('features')[j]
                    item = item.get('geometry').get('coordinates')
                    item = geometry.Polygon(item[0][0])
                    item = affinity.scale(item, xfact=1.01, yfact=1.01)
                    polygons = [item, local_polygon]
                    local_polygons = unary_union(polygons)
                    local_polygon = item
            union_list.append([local_polygons, element])
        found_id = None
        for index, element in enumerate(union_list):
            wkt = element[0].wkt
            if ground_polygon.within(element[0]):
                found_id = element[1]
                break
        for i in ids:
            if found_id != i[0]:
                del self.products[i[1]]
        print('Reduced the products to ' + str(len(self.products)) +
              ' products')

    def queried_products_as_df(self):
        return self.api.to_dataframe(self.products)

    def save_queried_products(self):
        orders_directory = self.directory / 'orders'
        if not orders_directory.exists():
            os.makedirs(orders_directory)

        # Save the queried products to a geojson file (e.g. to be loaded into QGIS)
        geojson_path = (self.directory / 'orders' /
                        self.order_id).with_suffix('.geojson')
        with geojson_path.open('w') as geojson_file:
            geojson_data = self.api.to_geojson(self.products)
            geojson_file.write(str(geojson_data))

        # Save the queried products as pandas df in a pkl file (preferred format when working in Python)
        df_path = (self.directory / 'orders' /
                   self.order_id).with_suffix('.pkl')
        df = self.api.to_dataframe(self.products)
        df.to_pickle(df_path)

    def save_queried_products_location(self, path):
        path = Path(path)
        path = path.parent.absolute()
        path = path / 'log'
        # Save the queried products to a geojson file (e.g. to be loaded into QGIS)
        geojson_path = (path / self.order_id).with_suffix('.geojson')
        with geojson_path.open('w') as geojson_file:
            geojson_data = self.api.to_geojson(self.products)
            geojson_file.write(str(geojson_data))

        # Save the queried products as pandas df in a pkl file (preferred format when working in Python)
        df_path = (path / self.order_id).with_suffix('.pkl')
        df = self.api.to_dataframe(self.products)
        df.to_pickle(df_path)
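A hypothetical driver for the class above; it assumes the module-level FLAGS object used by the query methods is configured, and the credentials, footprint file, and dates are placeholders:

from datetime import datetime
from pathlib import Path

from sentinelsat import geojson_to_wkt, read_geojson

downloader = Downloader('user', 'password', satellite='s2',
                        order_id=datetime.now().strftime('%Y%m%dT%H%M%S'),
                        directory=Path('/data/'))
footprint = geojson_to_wkt(read_geojson('area.geojson'))
downloader.query(footprint, '20200101', '20200131')
downloader.print_num_and_size_of_products()
downloader.save_queried_products()
downloader.download_zipfiles()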
Example #16
def main(args):
    """ Runs dataLayer processing scripts to turn raw dataLayer from (../raw) into
        cleaned dataLayer ready to be analyzed (saved in ../processed).
    """
    ## Talk to Rune about how dataLayer is handled, and whether it should be part of the "big" project.
    ## set number_tiles:1764
    config = TrainingConfig()
    config = update_config(args, config)
    logger = logging.getLogger(__name__)
    logger.info('making final dataLayer set from raw dataLayer')
    userTuple = [['pandagud', 'damp4ever'], ['pandagud2', 'damp4ever'],
                 ['pandagud3', 'damp4ever'], ['au524478', 'Palantir1234']]
    current_user = random.choice(userTuple)

    api = SentinelAPI(current_user[0], current_user[1],
                      'https://scihub.copernicus.eu/dhus')

    # search by polygon, time, and SciHub query keywords
    path = r"C:\Users\panda\Downloads\LC80290292014132LGN00.geojson"
    footprint = geojson_to_wkt(read_geojson(path))
    products = api.query(area=footprint,
                         date=('20210101', '20210105'),
                         platformname='Sentinel-2',
                         order_by='+ingestiondate',
                         limit=1)
    areas = api.to_geodataframe(products)
    geojson_fc = api.to_geojson(products)  # renamed to avoid shadowing the geojson module used below
    api.download_all(products, directory_path=r'C:\Users\panda\Sat_paper\Alfa')

    products = api.query(area=footprint,
                         date=('20210401', '20210430'),
                         producttype='GRD',
                         platformname='Sentinel-1',
                         sensoroperationalmode='IW',
                         polarisationmode='VV VH',
                         order_by='ingestiondate')
    firstproduct = next(iter(products))
    online_product = ''
    for i in products:
        is_online = api.is_online(products.get(i).get('uuid'))
        if is_online:
            online_product = i
            break
    delete_list = []
    for i in products:
        if i != online_product:
            delete_list.append(i)
    for i in delete_list:
        del products[i]

    ground_geojsons = read_geojson(path)
    products_geojsons = api.to_geojson(products)

    ground_polygon = ground_geojsons.get('features')[0].get('geometry').get(
        'coordinates')
    ground_polygon = geometry.Polygon(ground_polygon[0][0])
    import numpy as np
    titles = []
    ids = []
    for item in products_geojsons.get('features'):
        id = item.get('properties').get('id')
        item = item.get('properties').get('title')
        item = (item[17:25] + item[48:55])
        titles.append(item)
        ids.append([item, id])
    unique = list(set(titles))
    union_list = []
    for i, element in enumerate(unique):
        local_polygon = Polygon()
        for j in range(len(titles)):
            if titles[j] == element:
                item = products_geojsons.get('features')[j]
                item = item.get('geometry').get('coordinates')
                item = geometry.Polygon(item[0][0])
                item = affinity.scale(item, xfact=1.01, yfact=1.01)
                polygons = [item, local_polygon]
                local_polygons = unary_union(polygons)
                local_polygon = item
        union_list.append([local_polygons, element])
    found_id = None
    for index, element in enumerate(union_list):
        wkt = element[0].wkt
        if ground_polygon.within(element[0]):
            found_id = element[1]
            break
    for i in ids:
        if found_id != i[0]:
            del products[i[1]]
    area_list = []
    for index, item in enumerate(products_geojsons.get('features')):
        item = item.get('geometry').get('coordinates')
        item = geometry.Polygon(item[0][0])
        local_intersection = item.intersection(ground_polygon)
        local_intersection = [local_intersection.area, index]
        area_list.append(local_intersection)
    area_list.sort(reverse=True)
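    # Visit candidate products from largest to smallest overlap with the ground polygon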
    for index in range(len(area_list)):
        item = products_geojsons.get('features')[area_list[index][1]]
        id = item.get('properties').get('id')
        item = item.get('geometry').get('coordinates')
        item = geometry.Polygon(item[0][0])
        if item.intersects(ground_polygon):
            local_intersection = ground_polygon.intersection(item)
            print(str(ground_polygon.area))
            print(str(local_intersection.area))
            # ground_polygon = ground_polygon.difference(local_intersection)
            ground_polygon = (ground_polygon.symmetric_difference(
                local_intersection)).difference(local_intersection)
        else:
            del products[id]
    import datetime
    from datetime import timedelta
    S2_geojson = read_geojson(path)

    start_S1_date = S2_geojson.get('features')[0].get('properties').get(
        'ingestiondate')
    start_S1_date = start_S1_date.split('T')[0]
    start_S1_date = datetime.datetime.strptime(start_S1_date,
                                               '%Y-%m-%d').date()
    ## New end date for S1
    end_S1_date = start_S1_date + timedelta(days=7)
    start_S1_date = start_S1_date - timedelta(days=7)
    start_S1_date_str = str(start_S1_date).replace('-', '')
    end_S1_date_str = str(end_S1_date).replace('-', '')

    ## COMBINE FOOTPRINT
    geom_in_geojson = []
    geom_in_geojson.append(
        geojson.Feature(geometry=ground_polygon,
                        properties={"MissingData": "Test"}))
    feature_collection = FeatureCollection(geom_in_geojson)
    pathToFile = r'C:\Users\panda\Sat_paper\missing.geojson'
    with open(pathToFile, 'w') as f:
        dump(feature_collection, f)

    print("Done")
Example #17
class Downloader(object):
    def __init__(self,
                 username,
                 password,
                 satellite,
                 order_id,
                 directory=Path('/data/')):
        # The connection to ESA scihub
        self.api = SentinelAPI(username, password,
                               'https://scihub.copernicus.eu/dhus')

        # Sentinel-5p currently has its own pre-operations hub
        self.api_s5p = SentinelAPI(user='******',
                                   password='******',
                                   api_url='https://s5phub.copernicus.eu/dhus')

        # Use the current datetime to name the download order
        self.order_id = order_id

        # Use an ordered dict to store the metadata of the queried products
        self.products = OrderedDict()

        self.satellite = satellite

        self.directory = directory
        # if not self.directory.exists():  # Create directory if it does not exist
        #     os.makedirs(self.directory)

    def query(self, footprint, startdate, enddate):
        if self.satellite == 's1' or self.satellite == 'all':
            self.query_s1(footprint, startdate, enddate)
        if self.satellite == 's2' or self.satellite == 'all':
            self.query_s2(footprint, startdate, enddate)
        if self.satellite == 's3' or self.satellite == 'all':
            self.query_s3(footprint, startdate, enddate)
        if self.satellite == 's5p' or self.satellite == 'all':
            self.query_s5p(footprint, startdate, enddate)

    def query_s1(self, footprint, startdate, enddate):
        # Define producttypes (here it is Sentinel-1 GRDH products)
        producttypes = ['GRD']

        # Loop over producttypes and update the query dictionary
        # TODO: Fix this inefficient way of querying the relative orbits
        if FLAGS.s1_relative_orbit == [0]:
            for producttype in producttypes:
                queried_products = self.api.query(footprint,
                                                  date=(startdate, enddate),
                                                  platformname='Sentinel-1',
                                                  producttype=producttype,
                                                  sensoroperationalmode='IW')
                self.products.update(queried_products)

        else:
            for producttype in producttypes:
                for relative_orbit in FLAGS.s1_relative_orbit:
                    queried_products = self.api.query(
                        footprint,
                        date=(startdate, enddate),
                        platformname='Sentinel-1',
                        producttype=producttype,
                        sensoroperationalmode='IW',
                        relativeorbitnumber=relative_orbit)
                    self.products.update(queried_products)

    def query_s2(self, footprint, startdate, enddate):
        # Load parameters from FLAGS
        max_cloudcoverage = FLAGS.s2_max_cloudcoverage

        # Define producttypes (here it is Sentinel-2 L2A products)
        producttypes = [
            'S2MSI2Ap', 'S2MSI2A'
        ]  # Producttype names differ depending on the year they were published

        # Loop over producttypes and update the query dictionary
        # TODO: Fix this inefficient way of querying the relative orbits
        if FLAGS.s2_relative_orbit == [0]:
            for producttype in producttypes:
                queried_products = self.api.query(
                    footprint,
                    date=(startdate, enddate),
                    platformname='Sentinel-2',
                    producttype=producttype,
                    cloudcoverpercentage=(0, max_cloudcoverage))
                self.products.update(queried_products)

        else:
            for producttype in producttypes:
                for relative_orbit in FLAGS.s2_relative_orbit:
                    queried_products = self.api.query(
                        footprint,
                        date=(startdate, enddate),
                        platformname='Sentinel-2',
                        relativeorbitnumber=relative_orbit,
                        producttype=producttype,
                        cloudcoverpercentage=(0, max_cloudcoverage))
                    self.products.update(queried_products)

    def query_s3(self, footprint, startdate, enddate):
        queried_products = self.api.query(footprint,
                                          date=(startdate, enddate),
                                          platformname='Sentinel-3',
                                          producttype='SL_2_LST___',
                                          productlevel='L2')

        self.products.update(queried_products)

    def query_s5p(self, footprint, startdate, enddate):
        kwargs = {}
        producttypedescriptions = [
            'Ozone', 'Sulphur Dioxide', 'Nitrogen Dioxide', 'Methane',
            'Formaldehyde', 'Carbon Monoxide', 'Aerosol Index',
            'Aerosol Layer Height', 'Cloud'
        ]
        # producttypedescriptions = ['Ozone']

        # Loop over producttypes and update the query dictionary
        for producttypedescription in producttypedescriptions:
            queried_products = self.api_s5p.query(
                footprint,
                date=(startdate, enddate),
                platformname='Sentinel-5 Precursor',
                processinglevel='L2',
                producttypedescription=producttypedescription,
                **kwargs)
            # Remove any 'Suomi-NPP VIIRS Clouds' products which are returned as 'Cloud' (they shouldn't have been)
            # https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-5p/products-algorithms
            if producttypedescription == 'Cloud':
                temp_queried_products = queried_products.copy()
                for key in queried_products.keys():
                    if queried_products[key][
                            'producttypedescription'] != 'Cloud':
                        del temp_queried_products[key]
                queried_products = temp_queried_products
            self.products.update(queried_products)

    def print_num_and_size_of_products(self):
        logging.info('Number of products = ' + str(len(list(self.products))))
        logging.info('Total size [GB] = ' +
                     str(self.api.get_products_size(self.products)))

    # https://sentinelsat.readthedocs.io/en/master/api.html#lta-products
    # TODO: Get LTA retrieval to work properly (install of newest sentinelsat version is in dockerfile)
    # Retry every 30 min (+10 second buffertime) to request LTA products.
    @tenacity.retry(stop=tenacity.stop_after_attempt(200),
                    wait=tenacity.wait_fixed(1810))
    def download_zipfiles(self):
        zipfiles_directory = self.directory / 'zipfiles'
        if not zipfiles_directory.exists(
        ):  # Create directory if it does not exist
            os.makedirs(zipfiles_directory)

        # Get the products to be downloaded. The sample() function permutes the dataframe so that a new LTA
        # product is requested at every retry. The optimal solution would have been to rearrange the dataframe by
        # rotating the index at every retry, but this is a quick and dirty way to achieve something similar.
        # (https://stackoverflow.com/a/34879805/12045808).
        products_df = self.queried_products_as_df().sample(frac=1)

        # NOTE: The code below is only useful while the Sentinel-5p has a different api than the others. After this has
        #       been fixed, the code should be reduced to the following single line:
        # Download all zipfiles (it automatically checks if zipfiles already exist)
        # self.api.download_all(self.products, directory_path=zipfiles_directory)  # Download all zipfiles
        # But for now, use the following code:
        non_s5p_products = products_df[
            products_df['platformname'] != 'Sentinel-5 Precursor']
        s5p_products = products_df[products_df['platformname'] ==
                                   'Sentinel-5 Precursor']

        if len(non_s5p_products):
            logging.info("Downloading Sentinel-1/2/3 products")
            self.api.download_all(non_s5p_products.to_dict(into=OrderedDict,
                                                           orient='index'),
                                  directory_path=zipfiles_directory)
        else:
            logging.info("No Sentinel-1/2/3 products found in query")

        if len(s5p_products):
            logging.info("Downloading Sentinel-5p products")
            self.api_s5p.download_all(s5p_products.to_dict(into=OrderedDict,
                                                           orient='index'),
                                      directory_path=zipfiles_directory)
        else:
            logging.info("No Sentinel-5p products found in query")

        # The Sentinel-5p data has wrongly been given the filetype .zip, but it should be .nc, so make a copy with
        # .nc extension. A copy is made instead of renaming so sentinelsat doesn't re-download the file every time
        # it is run.
        s5p_downloaded_files = zipfiles_directory.glob('S5P*.zip')
        logging.debug(
            "Copying downloaded Sentinel-5p files from .zip to .nc (due to a bug in sentinelsat)"
        )
        for file in s5p_downloaded_files:
            if not file.with_suffix('.nc').exists():
                shutil.copy(str(file), str(file.with_suffix('.nc')))

    def queried_products_as_geojson(self):
        return self.api.to_geojson(self.products)

    def queried_products_as_df(self):
        return self.api.to_dataframe(self.products)

    def save_queried_products(self):
        orders_directory = self.directory / 'orders'
        if not orders_directory.exists():
            os.makedirs(orders_directory)

        # Save the queried products to a geojson file (e.g. to be loaded into QGIS)
        geojson_path = (self.directory / 'orders' /
                        self.order_id).with_suffix('.geojson')
        with geojson_path.open('w') as geojson_file:
            geojson_data = self.api.to_geojson(self.products)
            geojson_file.write(str(geojson_data))

        # Save the queried products as pandas df in a pkl file (preferred format when working in Python)
        df_path = (self.directory / 'orders' /
                   self.order_id).with_suffix('.pkl')
        df = self.api.to_dataframe(self.products)
        df.to_pickle(df_path)
Example #18
condition = True
while condition:
    try:
        api = SentinelAPI('USERNAME', 'PASSWORD', 'https://scihub.copernicus.eu/dhus')
        footprint = geojson_to_wkt(read_geojson('goldmine_sa.geojson'))
        products = api.query(
            footprint,
            date=(_date.strftime('%Y%m%d'), (_date+timedelta(days=1)).strftime('%Y%m%d')),
            platformname='Sentinel-1',
            producttype='GRD'
        )
    except Exception as e:
        print('{} for date: {}'.format(e, _date.strftime('%Y-%m-%d')))
        logging.error('{} for date: {}'.format(e, _date.strftime('%Y-%m-%d')))
    else:
        fc = api.to_geojson(products)
        features = fc['features']
        if len(features):
            print(_date)
            for feature in features:
                properties = feature.properties
                # properties
                id = properties['id']
                identifier = properties['identifier']
                title = properties['title']
                footprint = str(feature['geometry'])
                # slave
                acquisitiontype = properties['acquisitiontype']
                beginposition = properties['beginposition']
                endposition = properties['endposition']
                filename = properties['filename']
Example #19
import pandas as pd

from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt

api = SentinelAPI('fabiolana', 'Albertone_2016')
footprint = geojson_to_wkt(read_geojson('/media/sf_Downloads/__effis/ricerca.geojson'))

products = api.query(footprint,
                     # date=('20170801', date(2017, 12, 15)),
                     date=('20151001', '20171205'),
                     platformname='Sentinel-2',
                     # producttype='SLC')
                     #,orbitdirection='ASCENDING') #,
                     cloudcoverpercentage=(0, 10))


# download all results from the search
# api.download_all(products)

# GeoJSON FeatureCollection containing footprints and metadata of the scenes
api.to_geojson(products)

# loaded into a DataFrame
df_prods = pd.DataFrame(api.to_geojson(products))
# print df_prods

# GeoPandas GeoDataFrame with the metadata of the scenes and the footprints as geometries
print(api.to_geodataframe(products))

# Get basic information about the product: its title, file size, MD5 sum, date, footprint and
# its download url
# api.get_product_odata(<product_id>)

# Get the product's full metadata available on the server
# api.get_product_odata(<product_id>, full=True)
Example #20
# todo check for processed data
len(products)

# convert to Pandas DataFrame
products_df = api.to_dataframe(products)

# sort for most recent and lowest cloud cover
products_df_sorted = products_df.sort_values(['ingestiondate', 'cloudcoverpercentage'], ascending=[True, True])

test = products_df_sorted.head(1)
test['cloudcoverpercentage']
test['ingestiondate']
set(products_df['processinglevel'])

# GeoJSON FeatureCollection containing footprints and metadata of the scenes
geojson_products = api.to_geojson(products)

# GeoPandas GeoDataFrame with the metadata of the scenes and the footprints as geometries
geodata_products = api.to_geodataframe(products)

# plot product time vs cloudcover
# data = [go.Scatter(x=products_df_sorted.ingestiondate, y=products_df_sorted[['cloudcoverpercentage']])]
# py.plotly.iplot(data, filename = 'time-series-simple')

# download
api.download_all(test.index)

# unzip
# todo: needs loop to run through the data sets
directory_to_extract_to = "unzip"
archive = zipfile.ZipFile(str(test.title.iloc[0]) + '.zip')
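A sketch of the loop the TODO above asks for, assuming each product was downloaded as <title>.zip beside the script:

import zipfile

directory_to_extract_to = "unzip"
for title in products_df_sorted['title']:
    # Extract each downloaded product archive into the target directory
    with zipfile.ZipFile(title + '.zip') as archive:
        archive.extractall(directory_to_extract_to)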
Example #21
def query_sentinel_data(cfg, save_json=True):
    workpath = Path(os.getcwd())

    # api = SentinelAPI('puzhao', 'kth10044ESA!', 'https://scihub.copernicus.eu/dhus')
    # user, password = "******", "19940911"

    api = SentinelAPI(cfg.user, cfg.password,
                      'https://scihub.copernicus.eu/dhus')

    now = datetime.now().strftime("%Y-%m-%dT%H%M%S")
    today = datetime.today().strftime("%Y-%m-%d")
    if cfg.start_date is None:
        cfg.start_date = (datetime.today() +
                          timedelta(-1)).strftime("%Y-%m-%d")
    if cfg.end_date is None:
        cfg.end_date = (datetime.today() + timedelta(1)).strftime("%Y-%m-%d")
    print("now: ", now)

    cfg.update({
        "roi_url": cfg.roi_url,
        'placename': "British Columbia",
        "query_by": "roi",  # 'place' has problem
        "query_date": today,
        "start_date": cfg.start_date,
        "end_date": cfg.end_date,
        "platformname": cfg.platformname,  # Sentinel-2
        "producttype": cfg.producttype,  # S2MSI1C, S2MSI2A

        # 'relativeorbitnumber': 84,
        # "orbitdirection": "ASCENDING",
        "download_flag": cfg.download_flag,
        "download_one": True,  # download one by one
        "download_all": True,  # download all once
    })

    pprint(cfg)

    Sat_Abb_Dict = {'Sentinel-1': 'S1', 'Sentinel-2': 'S2', 'Sentinel-3': 'S3'}
    SAT = Sat_Abb_Dict[cfg.platformname]

    datafolder = Path(cfg.datafolder)
    savePath = datafolder / "data" / f"{SAT}_{cfg.producttype}"
    if not os.path.exists(savePath):
        os.makedirs(savePath)

    cfg.update({"sat_folder": str(savePath)})

    cfg.download_all = False if cfg.download_one else True
    cfg.download_all = cfg.download_all and cfg.download_flag
    cfg.download_one = cfg.download_one and cfg.download_flag

    if cfg.query_by == "roi":
        footprint = geojson_to_wkt(read_geojson(str(workpath / cfg.roi_url)))
        roi_name = os.path.split(cfg.roi_url)[-1].split(".")[0]

    if cfg.query_by == "place":
        footprint = placename_to_wkt(cfg.placename)
        roi_name = cfg.placename.replace(" ", "_")
    # print(BC)

    ### DSC rorb = 22
    if cfg.platformname == "Sentinel-1":
        cfg.checkProperty = "system:index"
        cfg.check_eeImgCol = "COPERNICUS/S1_GRD"

        products = api.query(
            footprint,
            date=(cfg.start_date.replace("-",
                                         ""), cfg.end_date.replace("-", "")),
            platformname=cfg.platformname,
            producttype=cfg.producttype,
            order_by='+beginposition',
        )

    else:  # S2, S3 ...
        cfg.checkProperty = "PRODUCT_ID"
        cfg.check_eeImgCol = "COPERNICUS/S2" if 'S2MSI1C' == cfg.producttype else "COPERNICUS/S2_SR"

        products = api.query(
            footprint,
            date=(cfg.start_date.replace("-",
                                         ""), cfg.end_date.replace("-", "")),
            platformname=cfg.platformname,
            producttype=cfg.producttype,
            order_by='+beginposition',
            cloudcoverpercentage=(0, cfg.cloudcoverpercentage),  # for S2 only
        )

    # print(products['0c05435b-0cd3-45a0-93f4-8c317eb1d558'])
    print("\n\n===========> Sentinel Auto-Query <============")

    products_df = api.to_dataframe(products)
    # print(products_df.keys())
    # print(products_df.index)
    # pprint(products_df[['sensoroperationalmode', 'orbitdirection', 'relativeorbitnumber']])

    products_dict = products_df.transpose().to_dict()

    products_list = products_df.index.tolist()
    if len(products_list) > 0:
        example_dict = products_dict[products_list[0]]
        property_list = [
            key for key in example_dict.keys()
            if is_jsonable(example_dict[key])
        ]
    # pprint(products_dict.keys())

    # select property for saving to json
    orbit_dict = {'ASCENDING': 'ASC', 'DESCENDING': 'DSC'}
    products_QueryInfo = edict()
    checkImgCol = ee.ImageCollection(f"{cfg.check_eeImgCol}")

    if SAT == "S1":
        sentinel_asset = ee.ImageCollection("users/omegazhangpzh/Sentinel1")
    if SAT == "S2":
        sentinel_asset = ee.ImageCollection("users/omegazhangpzh/Sentinel2")

    for product_id in products_dict.keys():
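        # Record only the products not already available in GEE or in the user's Sentinel asset collections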
        title = products_dict[product_id]['title']
        filtered_size = ee.Number(checkImgCol.filter(ee.Filter.eq(cfg.checkProperty, title)).size())\
                    .add(sentinel_asset.filter(ee.Filter.eq(cfg.checkProperty, title)).size()).getInfo()
        flag = filtered_size > 0
        print(title, flag)

        # flag = False
        if not flag:  # if this product is not available in GEE
            # print(title)
            # print(title, flag.getInfo())
            products_QueryInfo[title] = {
                key: products_dict[product_id][key]
                for key in property_list
            }
            # products_QueryInfo[title]['product_id'] = product_id

            orbit_direction = products_dict[product_id]['orbitdirection']
            orbit_num = products_dict[product_id]['relativeorbitnumber']

            products_QueryInfo[title]['orbit_key'] = orbit_dict[
                orbit_direction] + "_" + str(orbit_num)

    QueryInfo = edict()
    QueryInfo["products"] = products_QueryInfo

    QueryInfo["results"] = edict()
    QueryInfo["results"]['total_number'] = len(products_QueryInfo.keys())
    QueryInfo["results"]['products_list'] = sorted(
        list(products_QueryInfo.keys()))
    QueryInfo["results"]['orbKey_list'] = list(
        set([
            products_QueryInfo[product]['orbit_key']
            for product in list(products_QueryInfo.keys())
        ]))

    QueryInfo["cfg"] = cfg

    # roi_name = os.path.split(cfg.roi_url)[-1].split(".")[0]
    jsonPath = datafolder / "outputs" / roi_name
    if not os.path.exists(str(jsonPath)):
        os.makedirs(jsonPath)

    if save_json:
        """ save as json """
        json_url = jsonPath / f"{SAT}_{cfg.producttype}_{now}.json"
        print("\njson_url: " + str(json_url))

        with open(str(json_url), 'w') as fp:
            json.dump(edict(QueryInfo), fp, ensure_ascii=False, indent=4)
        """ save as geojson """
        import geojson
        with open(jsonPath / f"S1_{cfg.producttype}_{now}.geojson", 'w') as fp:
            geojson.dump(api.to_geojson(products), fp, indent=4)

    print()
    # print(footprint)
    print("now: ", now)
    print("Total Number of Searched Products:" +
          str(len(QueryInfo["results"]['products_list'])))
    pprint(QueryInfo["results"]['products_list'])

    return QueryInfo