def setUpClass(cls):
    """Build a shared AwsTileRequest fixture and download its payloads once.

    Requests bands B01/B05 plus several metafiles for tile 10UEV
    (2016-01-09, AWS index 0) into ``cls.OUTPUT_FOLDER`` and stores the
    downloaded objects on ``cls.data``.
    """
    super().setUpClass()
    cls.request = AwsTileRequest(
        data_folder=cls.OUTPUT_FOLDER,
        bands='B01, B05',
        metafiles='metadata,tileInfo, productInfo, qi/MSK_TECQUA_B04, auxiliary/ECMWFT ',
        tile='10UEV',
        time='2016-01-09',
        aws_index=0,
        data_collection=DataCollection.SENTINEL2_L1C,
    )
    # Keep file indices 0 and 2..6 from the request's download list (skip 1).
    wanted = [0] + list(range(2, 7))
    cls.data = cls.request.get_data(redownload=True, data_filter=wanted)
def fetch_s2_to_safe(tile_id: str) -> Path:
    """
    Fetches S2 granule by tile ID from AWS S3 to .SAFE format.
    Does not overwrite or re-download existing data.
    :param tile_id: S2 granule tile ID
    :returns Path of fetched data
    """
    name, sensing_time, index = AwsTile.tile_id_to_tile(tile_id)
    cache_root = Path(os.environ["CFSI_OUTPUT_CONTAINER"]) / "cache/safe"
    safe_request = AwsTileRequest(
        tile=name,
        time=sensing_time,
        aws_index=index,
        data_folder=cache_root,
        data_collection=DataCollection.SENTINEL2_L1C,
        safe_format=True,
    )
    LOGGER.info(f"Fetching .SAFE for {tile_id}")
    safe_request.save_data()  # existing files on disk are left untouched
    # The first path component of any saved file is the .SAFE directory name.
    safe_dir_name = Path(safe_request.get_filename_list()[0]).parts[0]
    return cache_root / safe_dir_name
def _get_file_ref(self, data_set_meta_info: DataSetMetaInfo, bands=None, metafiles=None) -> Optional[FileRef]:
    """Auxiliary method that limits how many files are downloaded during testing.

    Downloads the requested bands/metafiles for the tile described by
    ``data_set_meta_info`` into the temp dir, mirrors the result into an
    AWS-bucket-style directory layout, and returns a FileRef to it.
    Returns None for identifiers that fail validation.
    """
    if not self._is_valid_identifier(data_set_meta_info.identifier):
        # consider throwing an exception
        return None
    from sentinelhub import AwsTileRequest
    tile_name = self._get_tile_name(data_set_meta_info.identifier)
    start = get_time_from_string(data_set_meta_info.start_time)
    aws_index = self._get_aws_index(data_set_meta_info.identifier)
    request = AwsTileRequest(tile=tile_name, time=start.strftime('%Y-%m-%d'),
                             aws_index=aws_index, bands=bands,
                             metafiles=metafiles, data_folder=self._temp_dir)
    year, month, day = start.year, start.month, start.day
    logging.info(f'Downloading S2 Data from {month}-{day}-{year}')
    request.save_data()
    # sentinelhub writes into "<tile>,<YYYY-MM-DD>,<aws_index>"; mirror that
    # tree into the "<utm>/<lat-band>/<grid>/<year>/<month>/<day>/<index>"
    # layout (presumably matching the AWS bucket structure — verify).
    saved_dir = f'{self._temp_dir}/{tile_name},{year}-{month:02d}-{day:02d},{aws_index}/'
    new_dir = (f'{self._temp_dir}/{tile_name[0:2]}/{tile_name[2:3]}/'
               f'{tile_name[3:5]}/{year}/{month}/{day}/{aws_index}/')
    copy_tree(saved_dir, new_dir)
    logging.info(f'Downloaded S2 Data from {month}-{day}-{year}')
    return FileRef(new_dir, data_set_meta_info.start_time,
                   data_set_meta_info.end_time, get_mime_type(new_dir))
def setUpClass(cls):
    """Register the .SAFE-format test cases, one per processing baseline."""
    super().setUpClass()

    def _product(name, product_id, **extra):
        # Every product case stores .SAFE output under the class input folder.
        return cls.SafeTestCase(
            name,
            AwsProductRequest(product_id, safe_format=True,
                              data_folder=cls.INPUT_FOLDER, **extra))

    cls.test_cases = [
        _product('L1C_02.01',
                 'S2A_OPER_PRD_MSIL1C_PDMC_20151218T020842_R115_'
                 'V20151217T224602_20151217T224602',
                 bands=AwsConstants.S2_L1C_BANDS,
                 metafiles=AwsConstants.S2_L1C_METAFILES,
                 tile_list=['T59HNA']),
        cls.SafeTestCase(
            'L1C_02.01_tile',
            AwsTileRequest(tile='29KQB', time='2016-04-12', aws_index=None,
                           data_source=DataSource.SENTINEL2_L1C,
                           safe_format=True, data_folder=cls.INPUT_FOLDER)),
        _product('L1C_02.02',
                 'S2A_OPER_PRD_MSIL1C_PDMC_20160606T232310_R121_'
                 'V20160526T084351_20160526T084351.SAFE',
                 tile_list=['34HCF']),
        _product('L1C_02.04_old',
                 'S2A_OPER_PRD_MSIL1C_PDMC_20160910T174323_R071_'
                 'V20160701T204642_20160701T204643'),
        _product('L1C_02.04',
                 'S2A_MSIL1C_20170413T104021_N0204_R008_T31SCA_20170413T104021'),
        _product('L1C_02.05',
                 'S2A_MSIL1C_20171012T112111_N0205_R037_T29SQC_20171012T112713'),
        _product('L1C_02.06',
                 'S2A_MSIL1C_20180331T212521_N0206_R043_T07WFR_20180401T005612'),
        _product('L2A_02.01',
                 'S2A_USER_PRD_MSIL2A_PDMC_20160310T041843_R138_'
                 'V20160308T131142_20160308T131142'),
        # L2A_02.04 is the same as L2A_02.05
        _product('L2A_02.05',
                 'S2A_MSIL2A_20170827T105651_N0205_R094_T31WFN_20170827T105652'),
        _product('L2A_02.06',
                 'S2B_MSIL2A_20180216T102059_N0206_R065_T35VLL_20180216T122659'),
        _product('L2A_02.07',
                 'S2A_MSIL2A_20180402T151801_N0207_R068_T33XWJ_20180402T202222'),
    ]
def setUpClass(cls):
    """Register the .SAFE-format test cases, one per processing baseline."""
    super().setUpClass()

    def _product(name, product_id, **extra):
        # Every product case stores .SAFE output under the class input folder.
        return cls.SafeTestCase(
            name,
            AwsProductRequest(product_id, safe_format=True,
                              data_folder=cls.INPUT_FOLDER, **extra))

    cls.test_cases = [
        _product('L1C_02.01',
                 'S2A_OPER_PRD_MSIL1C_PDMC_20151218T020842_R115_'
                 'V20151217T224602_20151217T224602',
                 bands=AwsConstants.S2_L1C_BANDS,
                 metafiles=AwsConstants.S2_L1C_METAFILES,
                 tile_list=['T59HNA']),
        cls.SafeTestCase(
            'L1C_02.01_tile',
            AwsTileRequest(tile='29KQB', time='2016-04-12', aws_index=None,
                           data_collection=DataCollection.SENTINEL2_L1C,
                           safe_format=True, data_folder=cls.INPUT_FOLDER)),
        _product('L1C_02.02',
                 'S2A_OPER_PRD_MSIL1C_PDMC_20160606T232310_R121_'
                 'V20160526T084351_20160526T084351.SAFE',
                 tile_list=['34HCF']),
        _product('L1C_02.04_old',
                 'S2A_OPER_PRD_MSIL1C_PDMC_20160910T174323_R071_'
                 'V20160701T204642_20160701T204643'),
        _product('L1C_02.04',
                 'S2A_MSIL1C_20170413T104021_N0204_R008_T31SCA_20170413T104021'),
        _product('L1C_02.05',
                 'S2A_MSIL1C_20171012T112111_N0205_R037_T29SQC_20171012T112713'),
        _product('L1C_02.06',
                 'S2A_MSIL1C_20180331T212521_N0206_R043_T07WFR_20180401T005612'),
        _product('L1C_02.06_2',
                 'S2A_MSIL1C_20181004T175151_N0206_R141_T18XVM_20190219T160358'),
        _product('L1C_02.07',
                 'S2A_MSIL1C_20181119T031011_N0207_R075_T50TLK_20181119T061056'),
        _product('L1C_02.07_2',
                 'S2A_MSIL1C_20190129T143751_N0207_R096_T20LLK_20190225T132350'),
        _product('L2A_02.01',
                 'S2A_USER_PRD_MSIL2A_PDMC_20160310T041843_R138_'
                 'V20160308T131142_20160308T131142'),
        # L2A_02.04 is the same as L2A_02.05
        _product('L2A_02.05',
                 'S2A_MSIL2A_20170827T105651_N0205_R094_T31WFN_20170827T105652'),
        _product('L2A_02.06',
                 'S2B_MSIL2A_20180216T102059_N0206_R065_T35VLL_20180216T122659'),
        _product('L2A_02.07',
                 'S2A_MSIL2A_20180402T151801_N0207_R068_T33XWJ_20180402T202222'),
        _product('L2A_02.08',
                 'S2A_MSIL2A_20181005T104021_N0208_R008_T34WEU_20181007T220806'),
        _product('L2A_02.09',
                 'S2B_MSIL2A_20181029T093109_N0209_R136_T35UMQ_20181029T122414'),
        _product('L2A_02.10',
                 'S2B_MSIL2A_20181115T110319_N0210_R094_T32VLJ_20181115T142501'),
        _product('L2A_02.11',
                 'S2B_MSIL2A_20190310T235739_N0211_R030_T57MVM_20190311T013927'),
    ]
# NOTE(review): `id` below is presumably the loop variable of a Sentinel-1
# product loop that starts before this chunk — confirm; it also shadows the
# `id` builtin.
GRDrequest = AwsProductRequest(product_id=id, data_folder='../DATA/Sentinel1/GRD/',
                               config=config, safe_format=True)
GRDrequest.save_data()  # saves data to disk (this stage works)
# download the sentinel 2 data
for id in s2list:
    L2Arequest = AwsProductRequest(product_id=id, data_folder='../DATA/Sentinel2/L2A/',
                                   config=config, safe_format=True)
    L2Arequest.save_data()
    # get tile and date information to retrieve corresponding L1C tile;
    # tile is field 5 and the date fields come from field 6 of the
    # underscore-separated product id.
    # NOTE(review): field 6 is the *second* timestamp of the id, not the
    # sensing time in field 2 — verify this matches the AWS tile path.
    tile = id.split('_')[5]
    year = id.split('_')[6][:4]
    month = id.split('_')[6][4:6]
    day = id.split('_')[6][6:8]
    L1Crequest = AwsTileRequest(tile=tile, time='%s-%s-%s' % (year, month, day),
                                data_folder='../DATA/Sentinel2/L1C/',
                                data_source=DataSource.SENTINEL2_L1C,
                                config=config, safe_format=True)
    L1Crequest.save_data()
"""
# command line script to run cloud masking with fmask
fmask_sentinel2Stacked.py -o cloud.img --safedir S2B_MSIL1C_20180918T235239_N0206_R130_T56JNQ_20180919T011001.SAFE
"""
# NOTE(review): script fragment; `tile_info` here presumably comes from a
# search loop before this chunk — verify against the surrounding code.
print(tile_info)
"""
DOWNLOAD DATA - once we have found correct tiles or products
The AWS index is the last number in the AWS path
"""
tile_id = 'S2A_OPER_MSI_L1C_TL_MTI__20151219T100121_A002563_T38TML_N02.01'
# Split the ESA tile id into the (tile name, sensing date, AWS index) triple
# used to address the tile on the AWS bucket.
tile_name, time, aws_index = AwsTile.tile_id_to_tile(tile_id)
print(tile_name, time, aws_index)
bands_to_download = ['B8A', 'B10']
metafiles = ['tileInfo', 'preview', 'qi/MSK_CLOUDS_B00']
data_folder = './AwsData'
request = AwsTileRequest(
    tile=tile_name, time=time, aws_index=aws_index,
    bands=bands_to_download, metafiles=metafiles,
    data_folder=data_folder,
    data_collection=DataCollection.SENTINEL2_L1C
)
request.save_data()  # rerunning code won't redownload data unless redownload=True
data_list = request.get_data()  # won't redownload since data is already stored on disk
# Unpack in request order: the two bands first, then the three metafiles.
b8a, b10, tile_info, preview, cloud_mask = data_list
# NOTE(review): the first two lines are the tail of an OAuth token-fetch call
# whose opening is before this chunk; left unchanged.
token_url='https://services.sentinel-hub.com/oauth/token',
           client_id=client_id, client_secret=client_secret)
# All requests using this session will have an access token automatically added
resp = oauth.get("https://services.sentinel-hub.com/oauth/tokeninfo")
## This is the code used to get a specified tile and date
# config is a SHConfig object that contains the AWS credentials etc:
# https://sentinelhub-py.readthedocs.io/en/latest/config.html
config = SHConfig( )
config.aws_access_key_id = '<YOUR_ACCESS_KEY_ID>'
config.aws_secret_access_key = '<YOUR_SECRET_ACCESS_KEY>'
# NOTE(review): '2019-7-15' is not zero-padded — confirm sentinelhub parses it.
L1Crequest = AwsTileRequest(tile='30VUJ', time='2019-7-15', aws_index=0,
                            data_folder='../DATA/Sentinel2/L1C/T30VUJ/',
                            data_source=DataSource.SENTINEL2_L1C,
                            config=config, safe_format=True)
L1Crequest.save_data()  # saves data to disk (this stage works)
L2Arequest = AwsTileRequest(tile='30VUJ', time='2019-7-15', aws_index=0,
                            data_folder='../DATA/Sentinel2/L2A/T30VUJ/',
                            data_source=DataSource.SENTINEL2_L2A,
                            config=config, safe_format=True)
L2Arequest.save_data()  # saves data to disk (this stage works)
"""
# command line script to run cloud masking with fmask
# NOTE(review): `element` is presumably an item of a query-results loop that
# starts before this chunk — verify.
aux = element['properties']['productIdentifier']
tile_name, time, aws_index = AwsTile.tile_id_to_tile(aux)
# L2A bands at their native resolutions, downloaded one request per band.
bands = ['R10m/B02', 'R10m/B03', 'R10m/B04', 'R10m/B08',
         'R20m/B05', 'R20m/B06', 'R20m/B07', 'R20m/B11', 'R20m/B12',
         'R60m/B01', 'R60m/B09']
for banda in bands:
    metafiles = ['tileInfo']
    data_folder = './AwsData'
    try:
        # `bands` is passed a single band string here (one request per band).
        request = AwsTileRequest(tile=tile_name, time=time, aws_index=aws_index,
                                 bands=banda, metafiles=metafiles,
                                 data_folder=data_folder,
                                 data_source=DataSource.SENTINEL2_L2A)
    except:  # NOTE(review): bare except also traps KeyboardInterrupt — narrow to Exception
        errort = 1  # error flag, presumably read by code after this chunk
        print('Salto')
        break
    request.save_data()
    try:
        bandita, tileinfo = request.get_data()  # one band payload + tileInfo
    except:  # NOTE(review): bare except — same concern as above
        errort = 2
        print('Salto')
        break
def get_image(
    footprint, date, search_window, destination, bands=None, choose=False
):
    """
    Retrieves sentinel-2 scenes. Scenes must entirely contain <footprint>
    within date +/- search_window. Downloads <bands> to
    <destination>/sentinel-2. Requires SCIHUB_USERNAME and SCIHUB_PASSWORD
    environment variables.

    :param footprint: geojson filename (resolved relative to destination)
    :param date: target acquisition date
    :param search_window: half-width of the search window, in days
    :param destination: working directory root
    :param bands: band names to download (defaults to ["B03", "B11"])
    :param choose: if True, interactively pick which scene to download
    :returns: path of the downloaded tile folder, or None if aborted
    """
    # Avoid the shared-mutable-default pitfall: build the default per call.
    if bands is None:
        bands = ["B03", "B11"]
    print("Searching for Sentinel-2 Imagery...")
    workdir = path.join(destination, "sentinel-2")
    makedirs(workdir, exist_ok=True)
    api = SentinelAPI(
        environ["SCIHUB_USERNAME"],
        environ["SCIHUB_PASSWORD"],
        "https://scihub.copernicus.eu/dhus",
    )
    footprint_wkt = geojson_to_wkt(read_geojson(path.join(destination, footprint)))
    footprint_geom = wkt.loads(footprint_wkt)
    date_window = timedelta(days=search_window)
    date_range = (date - date_window, date + date_window)
    q = api.query(footprint_wkt, date=date_range, platformname="Sentinel-2")
    results = api.to_dataframe(q)
    # Keep only scenes whose footprint fully contains the AOI.
    does_overlap = [
        wkt.loads(i_fp).contains(footprint_geom) for i_fp in results.footprint
    ]
    results = results[does_overlap]
    print("Overlapping scenes: {}".format(len(results)))
    results.to_csv(path.join(workdir, "s2-collects.csv"))
    # Prefer the scene closest in time to the requested date.
    results["timedeltas"] = (date - results.datatakesensingstart).abs()
    results = results.sort_values(by="timedeltas", ascending=True)
    image_iloc = 0
    if choose:
        print(
            tabulate(
                results[
                    ["datatakesensingstart", "cloudcoverpercentage", "timedeltas"]
                ].reset_index(drop=True),
                headers="keys",
            )
        )
        image_iloc = int(input("Choose image ID [0-{}]: ".format(len(results) - 1)))
    # Build request for AWS data.
    tile_name, time, aws_index = AwsTile.tile_id_to_tile(
        results.iloc[image_iloc].level1cpdiidentifier
    )
    metafiles = ["tileInfo", "preview"]
    request = AwsTileRequest(
        tile=tile_name,
        time=time,
        aws_index=aws_index,
        bands=bands,
        metafiles=metafiles,
        data_folder=workdir,
        data_source=DataSource.SENTINEL2_L1C,
    )
    if input("Download? (y/n): ").lower() == "y":
        request.save_data()
    else:
        print("Aborted.")
        return None
    dateparts = time.split("-")
    # BUGFIX: the day used "{:d}", which strips a leading zero (e.g. "09" -> "9")
    # and produced a path that does not match the "<tile>,<YYYY-MM-DD>,<index>"
    # folder sentinelhub writes. Pad the day to two digits like the month.
    zero_pad_date = "{:d}-{:02d}-{:02d}".format(
        int(dateparts[0]), int(dateparts[1]), int(dateparts[2])
    )
    imgpath = path.join(
        workdir, ",".join([str(tile_name), zero_pad_date, str(aws_index)])
    )
    print(imgpath)
    return imgpath
# NOTE(review): scratch/notebook-style fragment; several names (x, data_folder,
# product_id) come from code outside this chunk, and imports appear mid-script.
search_bbox = BBox(bbox=[46.16, -16.15, 46.51, -15.58], crs=CRS.WGS84)
search_time_interval = ('2017-12-01T00:00:00', '2017-12-15T23:59:59')
# NOTE(review): hard-coded instance_id looks like a credential — move it to
# configuration rather than source.
wfs_iterator = WebFeatureService(search_bbox, search_time_interval,
                                 data_source=DataSource.LANDSAT8,
                                 instance_id='f438079a-b072-4285-b154-037b0627d223')
request = AwsProductRequest(product_id=x[0]['properties']['id'], data_folder='')
tt = time.time()
metafiles = []
for (tile_name, t_time, aws_index) in wfs_iterator.get_tiles():
    tile_request = AwsTileRequest(tile=tile_name, time=t_time,
                                  aws_index=aws_index,
                                  data_folder=data_folder)  # metafiles=metafiles
    tile_request.save_data()
# NOTE(review): `tt - time.time()` prints a *negative* elapsed time; the
# intended expression is `time.time() - tt`.
print(tt - time.time())
request = AwsProductRequest(product_id=product_id, data_folder=data_folder)
tile_id = 'S2A_OPER_MSI_L1C_TL_MTI__20151219T100121_A002563_T38TML_N02.01'
tile_name, time, aws_index = sentinelhub.AwsTile.tile_id_to_tile(tile_id)
# NOTE(review): the unpack above rebinds `time`, shadowing the `time` module
# used earlier in this fragment.
from sentinelhub import WebFeatureService, BBox, CRS, DataSource
from sentinelhub import AwsTile
tile_id = 'S2A_OPER_MSI_L1C_TL_MTI__20151219T100121_A002563_T38TML_N02.01'
tile_name, time, aws_index = AwsTile.tile_id_to_tile(tile_id)
tile_name, time, aws_index  # bare expression — notebook cell-output artifact
import gbdxtools
from sentinelhub import get_area_info, BBox, CRS, AwsTile, AwsTileRequest

gbdx = gbdxtools.Interface()

# Search AOI and time window for the catalogue query.
search_bbox = BBox(bbox=[-95.9770, 40.9776, -95.7312, 41.0975], crs=CRS.WGS84)
search_time_interval = ('2019-03-17T00:00:00', '2019-03-21T23:59:59')

# Query the area (max 50% cloud cover) and resolve each hit to its ESA
# product id via the AWS tile service.
print("Searching for Sentinel-2 scenes")
product_ids = []
for tile_info in get_area_info(search_bbox, search_time_interval, maxcc=0.5):
    tile_name, time, aws_index = AwsTile.tile_id_to_tile(
        tile_info['properties']['productIdentifier'])
    tile_request = AwsTileRequest(tile=tile_name, time=time,
                                  aws_index=aws_index, bands=None)
    safe_tile = tile_request.get_aws_service()
    product_id = safe_tile.get_product_id()
    product_ids.append(product_id)

print(f"{len(product_ids)} Sentinel-2 Scenes found..")
for product_id in product_ids:
    print(f"\tProduct ID: {product_id}")
# Check for duplicate images and keep a single row per product identifier.
datainfo = datainfo.drop_duplicates(subset='productIdentifier')
datainfo.index = np.arange(0, len(datainfo))
datainfo['datacoveragepct'] = np.nan
datainfo['cloudpixelpct'] = np.nan

# Collect tileInfo metadata for each image to obtain coverage statistics.
for i in range(len(datainfo)):
    try:
        tile_id = datainfo.productIdentifier[i]
        tile_name, time, aws_index = AwsTile.tile_id_to_tile(tile_id)
        request = AwsTileRequest(
            tile=tile_name, time=time, aws_index=aws_index,
            bands=[''], metafiles=['tileInfo'],
            data_source=DataSource.SENTINEL2_L2A)
        infos = request.get_data()
        # BUGFIX: use .loc instead of chained indexing
        # (df['col'][mask] = ...) — the chained form assigns into a
        # temporary and is not guaranteed to write back to `datainfo`.
        row_mask = datainfo.productIdentifier == tile_id
        datainfo.loc[row_mask, 'datacoveragepct'] = infos[0]['dataCoveragePercentage']
        datainfo.loc[row_mask, 'cloudpixelpct'] = infos[0]['cloudyPixelPercentage']
    except Exception:
        # Best-effort: rows whose metadata cannot be fetched keep NaN and are
        # removed by the filters below. Narrowed from a bare `except:` so
        # KeyboardInterrupt/SystemExit still propagate.
        pass

# Filter to include only S2 images with coverage above a specified % threshold.
datainfo = datainfo[datainfo.datacoveragepct > 95]
# BUGFIX: the original listed 'datacoveragepct' twice in `subset`, so rows
# missing 'cloudpixelpct' survived the dropna. Drop on both columns.
datainfo = datainfo.dropna(subset=['datacoveragepct', 'cloudpixelpct'])
datainfo.index = np.arange(0, len(datainfo))
# download data @10m
# - Band 2 (Blue)