Example No. 1
def getS1Data(geojson, max_size):
    # search by polygon, time, and SciHub query keywords
    footprint = geojson_to_wkt(read_geojson(geojson))
    date = time.strftime("%Y%m%d")
    # yesterday's date via the clock, avoiding integer arithmetic on YYYYMMDD strings
    yestdate = time.strftime("%Y%m%d", time.localtime(time.time() - 86400))

    products = api.query(footprint, (yestdate, date),
                         platformname='Sentinel-1',
                         producttype='GRD',
                         sensoroperationalmode='IW')
    results = []

    if len(products) == 0:
        print("No files found at date: " + date)
        quit()
    print("Found", len(products), "Sentinel-1 images.")

    products_df = api.to_dataframe(products).sort_values('size',
                                                         ascending=False)
    for i in range(len(products_df)):
        product_size = int(api.get_product_odata(products_df.index[i])["size"])
        if (product_size < max_size):
            results.append(products_df.index[i])
            print "Name:", api.get_product_odata(products_df.index[i])["title"], "size:", \
            str(product_size / 1000000), "MB.  |ADDED|"
        else:
            print "Name:", api.get_product_odata(products_df.index[i])["title"], "size:", \
             str(product_size / 1000000), "MB.  |TOO BIG|"

    return results
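A hypothetical call, assuming the module-level api client used above and max_size given in bytes (the OData size is compared in bytes before being printed as MB); the GeoJSON path is a placeholder:

small_ids = getS1Data('aoi.geojson', max_size=500 * 1000 * 1000)  # keep products under ~500 MB
api.download_all(small_ids)  # recent sentinelsat releases accept an iterable of product ids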
Example No. 2
    def download_error_image(img_date,geo_img,img_id,username,password):
        '''
        After reading the error file (image_error.txt) you get the info for the images that
        failed to download from COG Sentinel-2; pass that info to this function.
        If you have more than one failed image, call it in a for loop.

        You can find the img_date, geo_img and img_id values in the image_error.txt file.

        api,target_image_id=download_error_image(img_date,geo_img,img_id,username,password)
        api.download(target_image_id,directory_path='.')
        api.download('7be30c50-31fc-48c4-ab45-fddea9be7877',directory_path='.')

        If you get an error like >> Product 7be30c50-31fc-48c4-ab45-fddea9be7877 is not online. Triggering retrieval from long term archive.
        see https://sentinelsat.readthedocs.io/en/stable/api.html#lta-products

        username and password should be strings
        '''
        api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')
        day_before = img_date - datetime.timedelta(days=1)
        day_after = img_date + datetime.timedelta(days=1)
        footprint = geojson_to_wkt(geo_img)
        products = api.query(footprint,
                             #date = ('20181219', date(2018, 12, 29)),
                             date=(day_before,day_after),
                             platformname = 'Sentinel-2',
                             )
        sat_df=api.to_geodataframe(products)
        result=sat_df.loc[sat_df['title']==img_id]
        return api,result.index.values[0]
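The LTA note in the docstring can also be handled in code. A minimal sketch, assuming a recent sentinelsat release where get_product_odata() reports an 'Online' flag and trigger_offline_retrieval() is available, and following the docstring's own call pattern:

# Sketch only: the 'Online' flag and trigger_offline_retrieval() are assumptions from current sentinelsat docs
api, target_image_id = download_error_image(img_date, geo_img, img_id, username, password)
if api.get_product_odata(target_image_id).get('Online', False):
    api.download(target_image_id, directory_path='.')
else:
    # Offline product: ask the hub to restore it from the long term archive and retry later
    api.trigger_offline_retrieval(target_image_id)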
Example No. 3
def iterate_geojson_job(directory, directory2, api):

    for subdir in listdir_nohidden(directory):

        for filename in os.listdir(subdir):
            if filename.endswith(".geojson"):

                #print("subdir: " + subdir)
                filename2 = os.path.join(subdir, filename)

                #print("filename2: " + filename2)

                subdir_min = subdir.replace(directory, '')
                filename_min = filename.replace('.geojson', '')

                # print(subdir_min)
                # print(filename_min)

                #   For Sentinel 1 Data
                if re.search('s1a', subdir_min, re.IGNORECASE):
                    scene_dir = os.path.join(directory2, 'S1A',
                                             'S1A_' + filename_min)
                elif re.search('s1b', subdir_min, re.IGNORECASE):
                    scene_dir = os.path.join(directory2, 'S1B',
                                             'S1B_' + filename_min)
                else:
                    # neither S1A nor S1B: skip so scene_dir is never left unbound
                    continue

                #If GeoJSON folder does not exist
                if not os.path.exists(scene_dir):
                    os.makedirs(scene_dir)
                    print(scene_dir + " folder created.")

                footprint = geojson_to_wkt(read_geojson(filename2))
                products = api.query(footprint,
                                     date=("NOW-2DAY", "NOW"),
                                     platformname='Sentinel-1')
Example No. 4
def ObtainProduct_sentinel(JsonFile, Sdate, Edate, Cldmin, Cldmax):
    # Extracts the Sentinel products and stores them in a dictionary of scene IDs and acquisition dates
    # JsonFile = Name of the geojson file of Area of Interest
    # Sdate    = Start date (eg. 20150101 YYYYMMDD)
    # Edate    = End Date (eg. 20160101 YYYYMMDD)
    CldCoverrange = '[' + str(Cldmin) + ' TO ' + str(Cldmax) + ']'
    footprints = geojson_to_wkt(read_geojson(JsonFile))
    #print footprints
    products = api.query(footprints, (Sdate, Edate),
                         platformname='Sentinel-2',
                         cloudcoverpercentage=CldCoverrange)
    a = api.to_geodataframe(products)
    #print a
    # Collect all the data available and its date and product ID
    Result = {'Pr_Id': [], 'Datetime': [], 'Geometry': [], 'Identifier': []}
    if len(products) == 0:
        print('No scene available for the given conditions')
    else:
        print('Found ' + str(len(products)) + ' scene(s)\n'
              "Here is your list of available dates and product ids")
    counter = 0
    for i in range(len(a)):  # in products:
        Result['Pr_Id'].append(a['uuid'][i])
        Result['Datetime'].append(a['beginposition'][i])
        Result['Geometry'].append(a['geometry'][i])
        Result['Identifier'].append(a['identifier'][i])
        print(counter, ' ==> ', a['beginposition'][i], ' ==> ',
              a['uuid'][i], ' ==> ', a['cloudcoverpercentage'][i])
        counter += 1
    return Result
Example No. 5
 def Sen2Download(self, dprofile):
     download_profile_args = [
         dprofile.username,
         dprofile.password,
         dprofile.daysdiff,
         dprofile.shape_file_path,
         dprofile.download_dir,
         dprofile.concurrency,
     ]
     username, password, daysdiff, shape_file, directory_path, concurrency = download_profile_args
     logger.info(
         f'Sentinel-2 Downloads starting with dprofile = {dprofile}')
     api = SentinelAPI(username, password,
                       'https://scihub.copernicus.eu/dhus')
     # AOI GeoJSON to WKT
     footprint = geojson_to_wkt(read_geojson(shape_file))
     #dates to search
     end_date = datetime.datetime.now()
     daysdiff = datetime.timedelta(days=daysdiff)
     start_date = end_date - daysdiff
     #Search for data
     products = api.query(footprint,
                          date=(start_date, end_date),
                          platformname='Sentinel-2',
                          producttype='S2MSI1C',
                          cloudcoverpercentage=(0, 30))
     self.DownloadProducts(products, dprofile)
Example No. 6
def get_products(login_json, coordinates, date_start, date_end, download_dir):
    with open(login_json, 'r') as fp:
        LOGIN_INFO = json.load(fp)
    USER_NAME, PASSWORD = list(LOGIN_INFO.values())

    # connect to the API
    api = SentinelAPI(USER_NAME, PASSWORD, 'https://scihub.copernicus.eu/dhus')

    # define a map polygon
    geojson = Polygon(coordinates=coordinates)
    # search by polygon, time, and Hub query keywords
    footprint = geojson_to_wkt(geojson)
    dates = (date_start, date_end)  # (date(2018, 4, 1), date(2018, 4, 11))

    # query products for the requested period
    products = api.query(
        footprint,
        date=dates,
        platformname='Sentinel-2',
        # producttype='S2MSI2A',
        area_relation='Intersects',  # keep products whose footprint intersects the area of interest
        cloudcoverpercentage=(0, 40))

    # download all results from the search
    api.download_all(products, directory_path=download_dir)
    # product_id = list(products.keys())[0]
    # api.download(id=product_id, directory_path=download_dir)

    # GeoPandas GeoDataFrame with the metadata of the scenes and the footprints as geometries
    return api.to_geodataframe(products)
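A hypothetical call; the login JSON is assumed to hold the username and password as its first two values, and coordinates follow the geojson.Polygon convention of a list of (lon, lat) rings:

# Hypothetical usage; the file names and the AOI ring are placeholders
ring = [[(13.35, 52.45), (13.45, 52.45), (13.45, 52.55), (13.35, 52.55), (13.35, 52.45)]]
gdf = get_products('login.json', ring, '20180401', '20180411', './downloads')
print(gdf[['title', 'cloudcoverpercentage']])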
Example No. 7
def download_scenes(period):
    date_from = period.date_from
    date_to = period.date_to

    # Check if result has already been done
    scene_filename = 's1_{dfrom}_{dto}.tif'.format(
        dfrom=period.date_from.strftime('%Y%m'),
        dto=period.date_to.strftime('%Y%m'))
    scene_path = os.path.join(RESULTS_PATH, scene_filename)
    if os.path.exists(scene_path):
        print(
            "Sentinel-1 mosaic for period {}-{} already done:".format(
                date_from, date_to), scene_path)
        return

    # Prepare API client for download
    api = SentinelAPI(settings.SCIHUB_USER, settings.SCIHUB_PASS,
                      settings.SCIHUB_URL)

    # Query scenes
    footprint = geojson_to_wkt(read_geojson(AOI_PATH))
    products = api.query(footprint,
                         date=(date_from, date_to),
                         platformname='Sentinel-1',
                         producttype='GRD',
                         polarisationmode='VV VH',
                         orbitdirection='ASCENDING')

    for k, p in products.items():
        print((k, p['summary']))

    os.makedirs(S1_RAW_PATH, exist_ok=True)

    # Filter already downloaded products
    products_to_download = {
        k: v
        for k, v in products.items() if not os.path.exists(
            os.path.join(S1_RAW_PATH, '{}.zip'.format(v['title'])))
    }

    # Download products
    results = api.download_all(products_to_download,
                               directory_path=S1_RAW_PATH)
    products = list(products.values())

    # Process the images of each product
    with mp.Pool(settings.S1_PROC_NUM_JOBS) as pool:
        pool.map(process_product, products)

    # Create a median composite from all images of each band, generate extra
    # bands and concatenate results into a single multiband image.
    superimpose(products)
    median(products, period)
    generate_vvvh(period)
    concatenate_results(period)
    clip_result(period)

    clean_temp_files(period)
Example No. 8
    def query_bb(self, bb, ts0, ts1, ccrange=(0, 30)):
        """Search by polygon, time and cloud range

        return products
        """
        # search by polygon, time, and Hub query keywords
        if type(bb) is not tuple:
            raise TypeError(
                "Bounding box must be a tuple of min/max values of Lon/Lat")

        footprint = geojson_to_wkt(SentinelDL.geojson_bb(*bb))

        return self.api.query(footprint,
                              date=(ts0, ts1),
                              platformname=self.platform,
                              cloudcoverpercentage=ccrange)
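SentinelDL.geojson_bb is referenced above but not shown in this excerpt; a minimal sketch of such a helper, assuming the bounding box is ordered (lon_min, lat_min, lon_max, lat_max), could look like this:

    @staticmethod
    def geojson_bb(lon_min, lat_min, lon_max, lat_max):
        # Build a closed GeoJSON Polygon ring covering the bounding box;
        # geojson_to_wkt() accepts a plain geometry dict like this one.
        return {
            "type": "Polygon",
            "coordinates": [[
                [lon_min, lat_min],
                [lon_max, lat_min],
                [lon_max, lat_max],
                [lon_min, lat_max],
                [lon_min, lat_min],
            ]],
        }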
Example No. 9
def getSentinelData(productID="",
                    footprint=footprint,
                    begin=start_date,
                    end=end_date,
                    platform=platform_name,
                    clouds=cloud_cover_percentage):
    if productID:
        api.download(productID)
        return
    fp = geojson_to_wkt(read_geojson(footprint))
    products = api.query(fp,
                         date=(begin, end),
                         platformname=platform,
                         cloudcoverpercentage=clouds)
    api.download_all(products)
    return
Example No. 10
def sentinel1_tile_download(file, username, password, tilepath):
    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')

    td = timedelta(days=60)
    endDate = datetime.now()
    startDate = endDate - td

    footprint = geojson_to_wkt(read_geojson(file))
    print(footprint)
    #products = api.query(footprint,
    #                     date=(startDate, endDate),platformname='Sentinel-1')
    products = api.query(footprint,
                         producttype='SLC',
                         orbitdirection='ASCENDING')
    # download all results from the search
    api.download_all(products, directory_path=tilepath)
    return
Example No. 11
    def search(self, start='NOW-7DAYS', end='NOW'):
        self._logger.info('Searching for new data sets')

        # loading the search extent
        current_dir = os.path.dirname(__file__)
        extend_path = os.path.join(current_dir, "nrw.geojson")
        footprint = geojson_to_wkt(read_geojson(extend_path))
        try:
            return self.__api.query(area=footprint,
                                    initial_date=start,
                                    end_date=end,
                                    platformname='Sentinel-1',
                                    producttype='GRD')
        except SentinelAPIError:
            self._logger.error('There was an error searching for data sets',
                               exc_info=True)
            return {}
Example No. 12
def main(areapath, outfolder, start_date, end_date):
    # connect to the API
    api = SentinelAPI('******', '******', 'https://scihub.copernicus.eu/dhus')  # fill in your SciHub credentials

    # search by polygon, time, and Hub query keywords
    # footprint = geojson_to_wkt(read_geojson('map.geojson'))
    # products = api.query(footprint,
    #                      date = ('20151219', date(2015, 12, 29)),
    #                      platformname = 'Sentinel-2',
                         # cloudcoverpercentage = (0, 30))

    footprint = geojson_to_wkt(read_geojson(areapath))
    products = api.query(footprint,
                         date = (start_date, end_date),
                         platformname = 'Sentinel-2')

    # download all results from the search
    api.download_all(products, outfolder, checksum=False)
Example No. 13
def download():
    # search by polygon
    footprint = geojson_to_wkt(
        read_geojson(r"C:\Users\Ridene Safa\Desktop\task\task2\map.geojson"))
    print(footprint)
    print("Searching")
    products = api.query(
        footprint,
        date=('20200204', '20210206'),
        platformname='Sentinel-2',
        cloudcoverpercentage=(0, 30),
        #filename="*TCI_10m*",
        limit=1)
    print(len(products))
    print("Start downloading...")
    for i in products:
        #api.get_product_odata(i)
        api.download(i, SAVE_FOLDER)
Example No. 14
    def execute(self, context):
        log.info(context)
        log.info("#################")
        log.info("## DHUS Search ##")
        log.info('API URL: %s', self.dhus_url)
        log.info('API User: %s', self.dhus_user)
        #log.info('API Password: %s', self.dhus_pass)
        log.info('Start Date: %s', self.startdate)
        log.info('End Date: %s', self.enddate)
        log.info('Filter Max: %s', self.filter_max)
        log.info('Order By: %s', self.order_by)
        log.info('GeoJSON: %s', self.geojson_bbox)
        log.info('Keywords: %s', self.keywords)

        log.info('Now is: {}'.format(datetime.now()))
        log.info('6 hours ago was: {}'.format(datetime.now() -
                                              timedelta(hours=6)))

        print("Execute DHUS Search.. ")

        # search products
        api = SentinelAPI(self.dhus_user, self.dhus_pass, self.dhus_url)
        try:
            footprint = geojson_to_wkt(read_geojson(self.geojson_bbox))
        except Exception:
            log.error('Cannot open GeoJSON file: {}'.format(self.geojson_bbox))
            return False

        products = api.query(area=footprint,
                             date=(self.startdate, self.enddate),
                             order_by=self.order_by,
                             limit=self.filter_max,
                             **self.keywords)

        log.info("Retrieving {} products:".format(len(products)))
        products_summary = "\n"
        for key, product in products.items():
            products_summary += 'ID: {}, {}\n'.format(key, product['summary'])
        log.info(products_summary)

        context['task_instance'].xcom_push(key='searched_products',
                                           value=products)
        return products
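Because the search result is pushed to XCom, a downstream Airflow task can pull it back; a minimal sketch, where 'dhus_search' is a hypothetical task id for the operator above:

# Sketch of a downstream PythonOperator callable; 'dhus_search' is an assumed task id
def print_found_products(**context):
    products = context['task_instance'].xcom_pull(task_ids='dhus_search',
                                                  key='searched_products')
    for product_id, props in (products or {}).items():
        print(product_id, props['summary'])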
Example No. 15
def download_job(directory, directory2, api):

    for filename in os.listdir(directory):
        if filename.endswith(".geojson"):

            filename2 = os.path.join(directory, filename)
            print(filename2)
            os.chdir(directory2 + filename)
            footprint = geojson_to_wkt(read_geojson(filename2))
            products = api.query(footprint,
                                 date=("NOW-1HOUR", "NOW"),
                                 platformname='Sentinel-2')

            # for product in products:
            #     #print(product)

            # for columns in products:
            #     print(columns)
            #     for rows in products[columns]:
            #         print (rows,':',products[columns][rows])

            # #odata_prod = api.get_product_odata(product)
            products_df = api.to_dataframe(products)
            # #products_df_sorted = products_df.sort_values(['cloudcoverpercentage', 'ingestiondate'], ascending=[True, True])
            # #products_df_sorted = products_df_sorted.head(5)

            # products_df_sorted = products_df.sort_values(['cloudcoverpercentage', 'ingestiondate'], ascending=[True, True])
            # products_df_sorted = products_df_sorted.head(2)

            now_time = datetime.datetime.now()
            products_df.to_csv("testy.csv")
            new_csv_name = str(now_time) + '.csv'
            os.rename('testy.csv', new_csv_name)

            api.download_all(products)
            # complete_name = os.path.join(directory2+filename, "testy.txt")
            # file1 = open(complete_name, "w")
            # toFile = str(products)
            # file1.write(toFile)
            # file1.close()

            print('Files last updated on ' + str(now_time))
Example No. 16
    def read_aoi(self, aoi):
        """Read AOI from GeoJSON file or directly from WKT.

        :param aoi: area of interest (WKT)

        :return str: WKT string
        """
        if re.search(r'Polygon\s*(.*)', aoi, re.IGNORECASE):
            # Fedeo is very pedantic, the polygon keyword must be uppercase
            return aoi.upper().replace('POLYGON ', 'POLYGON')

        try:
            # could be replaced by geojson + shapely
            from sentinelsat.sentinel import geojson_to_wkt, read_geojson
        except ImportError as e:
            Logger.critical("{} processor: {}".format(self.identifier, e))
            return None

        # GeoJSON
        return geojson_to_wkt(read_geojson(aoi))
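For illustration, both input forms are accepted; the calls below are hypothetical ('processor' stands for an instance of this class and 'aoi.geojson' is a made-up file name):

# Hypothetical calls
wkt_a = processor.read_aoi('POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))')
wkt_b = processor.read_aoi('aoi.geojson')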
Example No. 17
def sat_query_job(footprint, api, satnum, tile_num):

    footprint_rd = geojson_to_wkt(read_geojson(footprint))

    raw_query = ''
    if df.file_name is not None:
        raw_query = raw_query + 'filename:%s AND ' % df.file_name
    if df.product_type is not None:
        raw_query = raw_query + 'producttype:%s AND ' % df.product_type
    if df.platform_name is not None:
        raw_query = raw_query + 'platformname:%s AND ' % df.platform_name
    # if df.orbit_direction is not None:
    #     raw_query = raw_query + 'orbitdirection:%s AND ' % df.orbit_direction
    if df.polarisation_mode is not None:
        raw_query = raw_query + 'polarisationmode:%s AND ' % df.polarisation_mode
    if df.cloud_cover_percentage is not None:
        raw_query = raw_query + 'cloudcoverpercentage:%s AND ' % df.cloud_cover_percentage
    if df.sensor_operational_mode is not None:
        raw_query = raw_query + 'sensoroperationalmode:%s AND ' % df.sensor_operational_mode

    if satnum == 'S1A':
        raw_query = raw_query + 'filename:S1A* AND '
        raw_query = raw_query + 'orbitdirection:Descending AND '
    elif satnum == 'S1B':
        raw_query = raw_query + 'filename:S1B* AND '
        raw_query = raw_query + 'orbitdirection:Ascending AND '
    raw_query = raw_query[:-5]

    # search by polygon, time, and SciHub query keywords
    products = api.query(footprint_rd,
                         date=(df.start_date, df.end_date),
                         raw=raw_query)

    # print results from the search
    if df.printProducts:
        print "%d products found for " % len(products) + tile_num
        for product in products:
            print(products[product]['filename'])

    return products
Example No. 18
def query_copernicus_hub(aoi=None,
                         username='******',
                         password='******',
                         hub='https://s5phub.copernicus.eu/dhus',
                         **kwargs):
    """
    Query Copernicus Open access Hub.

    :param aoi: (str) Geojson Area of interest url
    :param username: (str) Username to use for API connection
    :param password: (str) Password to use for API connection
    :param hub: (str) Url of hub to query
    :param kwargs: (dict) extra keywords for the api.query function (see https://sentinelsat.readthedocs.io/en/stable/cli.html#sentinelsat)
    :return: (SentinelAPI, dict) API object and results of query
    """

    # connect to the API
    api = SentinelAPI(username, password, hub)

    # query database
    if aoi is None:
        products = api.query(**kwargs)
    else:
        # convert .geojson file
        footprint = geojson_to_wkt(read_geojson(aoi))
        products = api.query(footprint, **kwargs)

    # display results
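    # NOTE: the sum below assumes every product 'size' string is reported in MB;
    # products whose size is reported in GB would be under-counted here.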
    print(('Number of products found: {number_product}\n'
           'Total products size: {size:.2f} MB\n').format(
               number_product=len(products),
               size=sum([
                   float(products[uuid]['size'][:-3])
                   for uuid in products.keys()
               ])))

    return api, products
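A hypothetical query against the default Sentinel-5P hub; the GeoJSON path, guest credentials and product type string below are assumptions, not part of the excerpt above:

# Hypothetical usage; all values are placeholders
api, products = query_copernicus_hub(aoi='no2_aoi.geojson',
                                     username='s5pguest',
                                     password='s5pguest',
                                     producttype='L2__NO2___',
                                     date=('NOW-1DAY', 'NOW'))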
Example No. 19
def download(request):
    if request.method == 'POST':
        data = json.loads(request.body)
        footprint = geojson_to_wkt(geojson.loads(data['geoJson']))
        username = config.username  # ask ITC for the username and password
        password = config.password
        api = SentinelAPI(username, password,
                          "https://apihub.copernicus.eu/apihub/"
                          )  # fill with SMARTSeeds user and password
        tanggal = '[{0} TO {1}]'.format(data['dateFrom'].replace('.000Z', 'Z'),
                                        data['dateTo'].replace('.000Z', 'Z'))
        print(tanggal)
        products = api.query(footprint,
                             producttype=config.producttype,
                             orbitdirection=config.orbitdirection,
                             platformname='Sentinel-1',
                             date=tanggal)
        # save downloads into the sentineldata folder
        dirpath = os.getcwd() + '/sentineldata'
        for product in products:
            try:
                api.download(product, directory_path=dirpath, checksum=True)
            except Exception:
                continue
        for item in os.listdir(dirpath):
            if item.endswith(".incomplete"):
                os.remove(os.path.join(dirpath, item))
        # notification step
        email = send_mail(  # Django helper for sending email
            'Your Download was successful!',  # email subject
            'Terima kasih sudah menggunakan aplikasi webgis data yang anda unduh sudah masuk kedalam sistem website!',  # email body (Indonesian)
            settings.EMAIL_HOST_USER,  # sender address for the notification
            [request.user.email],  # notification recipient
            fail_silently=False,
        )
        return HttpResponse(request.body)
Example No. 20
def sen2_json_query(geojson_path, cloud, start_date, end_date, conf):
    """

    Parameters
    ----------
    geojson_path
        Path to a GeoJSON file describing the area of interest.
    cloud
        Maximum acceptable cloud cover percentage.
    start_date
        Start of the query period.
    end_date
        End of the query period.
    conf
        Configuration mapping holding the SciHub credentials under conf["sen2"]["user"] and conf["sen2"]["pass"].

    Returns
    -------
    A dictionary of products

    """
    api = SentinelAPI(conf["sen2"]["user"], conf["sen2"]["pass"],
                      'https://scihub.copernicus.eu/dhus')
    footprint = geojson_to_wkt(read_geojson(geojson_path))
    products = api.query(footprint,
                         platformname='Sentinel-2',
                         cloudcoverpercentage=(0, cloud),
                         date=(start_date, end_date))
    return products
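A hypothetical call, where conf is assumed to be a mapping with the "sen2" credentials used above; the GeoJSON path and credentials are placeholders:

# Hypothetical usage
conf = {"sen2": {"user": "myuser", "pass": "mypass"}}
products = sen2_json_query("aoi.geojson", cloud=30,
                           start_date="20200101", end_date="20200201",
                           conf=conf)
print(len(products), "products found")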
Example No. 21
def cli(user, password, geometry, start, end, uuid, name, download, md5, sentinel, producttype,
        instrument, cloud, footprints, path, query, url, order_by, limit):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your Copernicus Open Access Hub user and password, you must pass a geojson file
    containing the geometry of the area you want to search for or the UUIDs of the products. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    _set_logger_handler()

    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel

    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument

    if producttype:
        search_kwargs["producttype"] = producttype

    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs["cloudcoverpercentage"] = (0, cloud)

    if query is not None:
        search_kwargs.update((x.split('=') for x in query.split(',')))

    if geometry is not None:
        search_kwargs['area'] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        uuid_list = [x.strip() for x in uuid.split(',')]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if 'Invalid key' in e.msg:
                    logger.error('No product with ID \'%s\' exists on server', productid)
    elif name is not None:
        search_kwargs["identifier"] = name
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by, limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path, checksum=md5)
        if md5 is True:
            if len(failed_downloads) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for failed_id in failed_downloads:
                        outfile.write("%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        for product_id, props in products.items():
            if uuid is None:
                logger.info('Product %s - %s', product_id, props['summary'])
            else:  # querying uuids has no summary key
                logger.info('Product %s - %s - %s MB', product_id, props['title'],
                            round(int(props['size']) / (1024. * 1024.), 2))
        if uuid is None:
            logger.info('---')
            logger.info('%s scenes found with a total size of %.2f GB',
                        len(products), api.get_products_size(products))
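For a quick in-process smoke test the command can be driven with click's test runner; the option names below are assumed to mirror the parameter names (as they do in the sentinelsat CLI) and the GeoJSON path is a placeholder:

# Sketch only: option names, credentials and the GeoJSON path are assumptions
from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(cli, ['--user', 'myuser', '--password', 'mypass',
                             '--geometry', 'aoi.geojson',
                             '--start', '20200101', '--end', '20200201',
                             '--sentinel', '2', '--cloud', '30',
                             '--url', 'https://scihub.copernicus.eu/dhus'])
print(result.output)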
Example No. 22
def cli(
    user,
    password,
    geometry,
    start,
    end,
    uuid,
    name,
    download,
    sentinel,
    producttype,
    instrument,
    cloud,
    footprints,
    path,
    query,
    url,
    order_by,
    limit,
):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your Copernicus Open Access Hub user and password, you must pass a geojson file
    containing the geometry of the area you want to search for or the UUIDs of the products. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    _set_logger_handler()

    if user is None or password is None:
        try:
            user, password = requests.utils.get_netrc_auth(url)
        except TypeError:
            pass

    if user is None or password is None:
        raise click.UsageError(
            "Missing --user and --password. Please see docs "
            "for environment variables and .netrc support.")

    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel

    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument

    if producttype:
        search_kwargs["producttype"] = producttype

    if cloud:
        if sentinel not in ["2", "3"]:
            logger.error("Cloud cover is only supported for Sentinel 2 and 3.")
            exit(1)
        search_kwargs["cloudcoverpercentage"] = (0, cloud)

    if query is not None:
        search_kwargs.update((x.split("=") for x in query))

    if geometry is not None:
        search_kwargs["area"] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        uuid_list = [x.strip() for x in uuid]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if "Invalid key" in e.msg:
                    logger.error("No product with ID '%s' exists on server",
                                 productid)
                    exit(1)
                else:
                    raise
    elif name is not None:
        search_kwargs["identifier"] = name[0] if len(
            name) == 1 else "(" + " OR ".join(name) + ")"
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by,
                             limit=limit,
                             **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        if os.path.isdir(footprints):
            foot_path = os.path.join(footprints, "search_footprints.geojson")
        else:
            foot_path = "search_footprints.geojson"
        if path == ".":
            dump_path = os.path.join(os.getcwd(), foot_path)
        else:
            dump_path = os.path.join(path, foot_path)
        with open(dump_path, "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, triggered, failed_downloads = api.download_all(
            products, path)
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"),
                      "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write("%s : %s\n" %
                                  (failed_id, products[failed_id]["title"]))
    else:
        for product_id, props in products.items():
            if uuid is None:
                logger.info("Product %s - %s", product_id, props["summary"])
            else:  # querying uuids has no summary key
                logger.info(
                    "Product %s - %s - %s MB",
                    product_id,
                    props["title"],
                    round(int(props["size"]) / (1024.0 * 1024.0), 2),
                )
        if uuid is None:
            logger.info("---")
            logger.info(
                "%s scenes found with a total size of %.2f GB",
                len(products),
                api.get_products_size(products),
            )
Example No. 23
def search(user, password, geojson, start, end, download, md5, sentinel,
           producttype, instrument, sentinel1, sentinel2, cloud, footprints,
           path, query, url):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs.update({"platformname": "Sentinel-" + sentinel})

    if instrument and not producttype:
        search_kwargs.update({"instrumentshortname": instrument})

    if producttype:
        search_kwargs.update({"producttype": producttype})

    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError(
                'Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs.update({"cloudcoverpercentage": "[0 TO %s]" % cloud})

    # DEPRECATED: to be removed with next major release
    elif sentinel2:
        search_kwargs.update({"platformname": "Sentinel-2"})
        logger.info('DEPRECATED: Please use --sentinel instead')

    # DEPRECATED: to be removed with next major release
    elif sentinel1:
        search_kwargs.update({"platformname": "Sentinel-1"})
        logger.info('DEPRECATED: Please use --sentinel instead')

    if query is not None:
        search_kwargs.update(dict([i.split('=') for i in query.split(',')]))

    wkt = geojson_to_wkt(read_geojson(geojson))
    products = api.query(wkt, start, end, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"),
                  "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products,
                                                           path,
                                                           checksum=md5)
        if md5 is True:
            if len(failed_downloads) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"),
                          "w") as outfile:
                    for failed_id in failed_downloads:
                        outfile.write(
                            "%s : %s\n" %
                            (failed_id, products[failed_id]['title']))
    else:
        for product_id, props in products.items():
            logger.info('Product %s - %s' % (product_id, props['summary']))
        logger.info('---')
        logger.info('%s scenes found with a total size of %.2f GB' %
                    (len(products), api.get_products_size(products)))
Example No. 24
geom = feat.GetGeometryRef()
print('Geometry of feature 1:', geom)

###############################################
# convert the shapefile to geojson
###############################################
gjfile = shapefile.split(".")[0] + ".geojson"
com = "ogr2ogr -f GeoJSON -t_srs crs:84 " + gjfile + " " + shapefile
flag = os.system(com)
if flag == 0:
    print('Shapefile converted to Geojson format: ' + gjfile)
else:
    print('Error converting shapefile to Geojson')

# convert the geojson to wkt for the API search
footprint = geojson_to_wkt(read_geojson(gjfile))

# old code to open a geojson file directly
# with open(geojsonfile) as f:
#     polydata = gj.load(f)

###############################################
# search the ESA Sentinel data hub
###############################################

# set query parameters
query_kwargs = {
    'area': footprint,
    'platformname': 'Sentinel-1',
    'producttype': 'GRD',
    #        orbitdirection='ASCENDING'),
"""
import os
import geopandas as gpd
from sentinelsat.sentinel import SentinelAPI, read_geojson, geojson_to_wkt

#Defining the Area of interest (must be GeoJson)
workspace = 'C:/...../workspace'
AOI= os.path.join (workspace, "AOI.geojson")

#Setting connection parameters to the Sentinel Hub server
user = '******' ## change this!
password = '******' ## change this!
api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus')

#Setting the search footprint using a Geojson file
footprint = geojson_to_wkt(read_geojson(AOI))

#Setting the search query parameters
products = api.query(footprint,
                     date = ('20200101', '20200331'),
                     platformname = 'Sentinel-2',
                     processinglevel = 'Level-2A',
                     cloudcoverpercentage = (0, 20))

#Printing the number of products found
print("The number of products found is: {} " .format (len(products)))

#Creating a table with all the product search results
products_table = api.to_geodataframe(products)

#This part downloads the product(s) in the same folder where your code is located
Example No. 26
def find_sentinel_images(area_of_interest, date_start, date_end, platform_name, user, password,
                         datastore_base_path, download_path,
                         hub_address="https://scihub.copernicus.eu/apihub",
                         area_relation="Intersects", limit_to_tiles=[], other_search_keywords={},
                         limit_to_scenes=[], download=True, silent=False):

    def sprint(string):
        if not silent:
            print(string)
    
    ###################################
    identifiers = []
    products = {}
    product_paths = []

    sprint("Searching for scenes on "+hub_address)
    sprint(date_start+" - "+date_end)
    # search by polygon, time, and Hub query keywords
    file_name = []
    if limit_to_tiles:
        file_name = ["*_" + limit_to_tiles[i] + "_*" for i in range(len(limit_to_tiles))]
    file_name = file_name + limit_to_scenes
    if len(file_name) == 0:
        file_name = "*"
    elif len(file_name) == 1:
        file_name = file_name[0]
    else:
        file_name = " OR ".join(file_name)
        file_name = "(" + file_name + ")"
        
    footprint = geojson_to_wkt(read_geojson(area_of_interest))
    products = _search_on_hub(user, password, hub_address, area=footprint,
                              area_relation=area_relation, date=(date_start, date_end),
                              platformname=platform_name, filename=file_name,
                              **other_search_keywords)
    products = _remove_duplicate_acquisitions(products)
    sprint("Found %i scenes" % len(products.keys()))
    for k in products.keys():
        identifiers.append(products[k]["identifier"])
        sprint(products[k]["identifier"])
    if not download:
        return list(products.values())

    ##################################
    # Then locate them in the IPT eodata store
    sprint("Locating scenes in eodata store...")
    for i, identifier in enumerate(identifiers):

        path = _search_on_datastore(datastore_base_path, identifier)
        # If they are not in the IPT eodata store (some S3 images are missing)
        # then download them and store in the download directory in case they
        # haven't been downloaded yet.
        if not path:
            if products:
                product = products[list(products.keys())[i]]
            else:
                product = _search_on_hub(user, password, hub_address, filename=identifier)
                if not product:
                    print("Product " + identifier + " does not exist and will not be downloaded!")
                    continue

            sprint("Scene not found in eodata store, downloading from "+hub_address+"...")
            path = _download_from_hub(product, download_path, user, password, hub_address, False)
            if not path:
                sprint("Could not download...")
                continue

        sprint(path)
        product_paths.append(path)

    return product_paths
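A hypothetical call that only searches (download=False); every path, credential and tile id below is a placeholder:

# Hypothetical usage; all values are placeholders
scenes = find_sentinel_images("aoi.geojson", "20200601", "20200630", "Sentinel-2",
                              "myuser", "mypass",
                              datastore_base_path="/eodata/Sentinel-2",
                              download_path="./downloads",
                              limit_to_tiles=["32UQD"],
                              other_search_keywords={"cloudcoverpercentage": (0, 30)},
                              download=False)
for scene in scenes:
    print(scene["identifier"])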
Example No. 27
    proc_scene_dir = os.path.join(PROC_DIR, period_s)
    tci_path = os.path.join(proc_scene_dir, 'tci.tif')

    if os.path.exists(tci_path):
        logger.info("TCI file already generated at %s", tci_path)
        return tci_path

    if not settings.SCIHUB_USER or not settings.SCIHUB_PASS:
        raise "SCIHUB_USER and/or SCIHUB_PASS are not set. " + \
              "Please read the Configuration section on README."

    api = SentinelAPI(settings.SCIHUB_USER, settings.SCIHUB_PASS,
                      settings.SCIHUB_URL)

    extent = read_geojson(EXTENT_PATH)
    footprint = geojson_to_wkt(extent)
    logger.info(
        "Query S2MSI2A products with up to %d%% cloud cover from %s to %s",
        MAX_CLOUD_PERC, date_from, date_to)
    products = api.query(footprint,
                         date=(date_from, date_to),
                         platformname='Sentinel-2',
                         cloudcoverpercentage=(0, MAX_CLOUD_PERC),
                         producttype='S2MSI2A')
    logger.info("Found %d products", len(products))

    raw_dir = os.path.join(RAW_DIR, period_s)
    os.makedirs(raw_dir, exist_ok=True)

    # Filter already downloaded products
    products_to_download = {
Example No. 28
parser.add_argument('--geojson', type=str, default="map.geojson", metavar='N', help="footprint")
args = parser.parse_args()

print("loading id...")
data = json.load(open(args.data))

startdate = date(data["startdate"][0], data["startdate"][1], data["startdate"][2])
enddate = date(data["enddate"][0], data["enddate"][1], data["enddate"][2])


print("connecting to sentinel API...")
api = SentinelAPI(data["login"], data["password"], 'https://scihub.copernicus.eu/dhus')

# search by polygon, time, and SciHub query keywords
print("searching...")
footprint = geojson_to_wkt(read_geojson(args.geojson))
if args.sentinel == 1:
    products = api.query(footprint,
                         date=(startdate,enddate),
                         platformname = 'Sentinel-1',
                         producttype = "GRD"
                         )
elif args.sentinel == 2:
    products = api.query(footprint,
                        date=(startdate,enddate),
                         platformname = 'Sentinel-2'
                         )
print("  product number: ",len(products))
# download all results from the search
print("downloading...")
api.download_all(products)
Example No. 29
fecha_hasta = date(2019, 1, 1)

root_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')
aoi_path = os.path.join(root_path, 'data', 'aoi_4326.geojson')

fechas = (fecha_desde, fecha_hasta)

# connect to the API
api = SentinelAPI(os.getenv("USUARIO"), os.getenv("PASSWORD"),
                  'https://scihub.copernicus.eu/dhus')

# download single scene by known product id
#api.download(<product_id>)

# search by polygon, time, and Hub query keywords
footprint = geojson_to_wkt(read_geojson(aoi_path))

products = api.query(footprint,
                     date=fechas,
                     platformname='Sentinel-2',
                     cloudcoverpercentage=(0, 100))
print(products)

# download all results from the search
result = api.download_all(products)
print(result)

# GeoJSON FeatureCollection containing footprints and metadata of the scenes
#api.to_geojson(products)

# GeoPandas GeoDataFrame with the metadata of the scenes and the footprints as geometries
Example No. 30
def cli(user, password, geometry, start, end, uuid, name, download, sentinel, producttype,
        instrument, cloud, footprints, path, query, url, order_by, limit):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your Copernicus Open Access Hub user and password, you must pass a geojson file
    containing the geometry of the area you want to search for or the UUIDs of the products. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    _set_logger_handler()

    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel

    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument

    if producttype:
        search_kwargs["producttype"] = producttype

    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs["cloudcoverpercentage"] = (0, cloud)

    if query is not None:
        search_kwargs.update((x.split('=') for x in query.split(',')))

    if geometry is not None:
        search_kwargs['area'] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        uuid_list = [x.strip() for x in uuid.split(',')]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if 'Invalid key' in e.msg:
                    logger.error('No product with ID \'%s\' exists on server', productid)
    elif name is not None:
        search_kwargs["identifier"] = name
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by, limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path)
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write("%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        for product_id, props in products.items():
            if uuid is None:
                logger.info('Product %s - %s', product_id, props['summary'])
            else:  # querying uuids has no summary key
                logger.info('Product %s - %s - %s MB', product_id, props['title'],
                            round(int(props['size']) / (1024. * 1024.), 2))
        if uuid is None:
            logger.info('---')
            logger.info('%s scenes found with a total size of %.2f GB',
                        len(products), api.get_products_size(products))
Example No. 31
def search(
        user, password, geojson, start, end, download, md5, sentinel, producttype,
        instrument, sentinel1, sentinel2, cloud, footprints, path, query, url):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your SciHub user and password, you must pass a geojson file
    containing the polygon of the area you want to search for. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    api = SentinelAPI(user, password, url)

    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs.update({"platformname": "Sentinel-" + sentinel})

    if instrument and not producttype:
        search_kwargs.update({"instrumentshortname": instrument})

    if producttype:
        search_kwargs.update({"producttype": producttype})

    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs.update({"cloudcoverpercentage": "[0 TO %s]" % cloud})

    # DEPRECATED: to be removed with next major release
    elif sentinel2:
        search_kwargs.update({"platformname": "Sentinel-2"})
        logger.info('DEPRECATED: Please use --sentinel instead')

    # DEPRECATED: to be removed with next major release
    elif sentinel1:
        search_kwargs.update({"platformname": "Sentinel-1"})
        logger.info('DEPRECATED: Please use --sentinel instead')

    if query is not None:
        search_kwargs.update(dict([i.split('=') for i in query.split(',')]))

    wkt = geojson_to_wkt(read_geojson(geojson))
    products = api.query(wkt, start, end, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path, checksum=md5)
        if md5 is True:
            if len(failed_downloads) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for failed_id in failed_downloads:
                        outfile.write("%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        for product_id, props in products.items():
            logger.info('Product %s - %s' % (product_id, props['summary']))
        logger.info('---')
        logger.info(
            '%s scenes found with a total size of %.2f GB' %
            (len(products), api.get_products_size(products)))
Example No. 32
import os
import requests
import json
from sentinelsat.sentinel import read_geojson, geojson_to_wkt
from pandas.io.json import json_normalize

#set wd

os.chdir(
    'D:/OneDrive/Documents/Work/CloudFerro/Projects/DIAS_CAP/DataSearch/WRLD_190814'
)

# import json
geom = geojson_to_wkt(read_geojson('north.geojson'))

#set search parameters, change as desired
#example S2
args = {
    'collection': 'Sentinel2',
    'product': 'LEVEL2A',
    'startDate': '2019-01-01',
    'completionDate': '2019-12-31',
    #'geometry': geom,
    'status': '34'
    #'status': '31|32'
    #'status': 'all'
}

args = {
    'collection': 'Sentinel3',
    'product': 'LEVEL1',