def set_sh_config(config: BaseConfig) -> SHConfig:
    """Copy AWS (and, if declared, Sentinel Hub) credentials into an SHConfig.

    Builds a fresh ``SHConfig``, fills in the AWS key pair from ``config``,
    optionally adds the Sentinel Hub OAuth client pair when both fields are
    declared on the config class, then persists the result so download/io
    tasks can use it.

    Args:
        config: Object carrying ``aws_access_key_id`` / ``aws_secret_access_key``
            and, optionally, ``sh_client_id`` / ``sh_client_secret`` annotations.

    Returns:
        The saved ``SHConfig`` instance.
    """
    sh_config = SHConfig()
    sh_config.aws_access_key_id = config.aws_access_key_id
    sh_config.aws_secret_access_key = config.aws_secret_access_key

    # Only forward the SH OAuth pair when the config class declares both fields.
    declared = config.__annotations__.keys()
    if 'sh_client_id' in declared and 'sh_client_secret' in declared:
        sh_config.sh_client_id = config.sh_client_id
        sh_config.sh_client_secret = config.sh_client_secret

    sh_config.save()
    return sh_config
def download_from_aws(scene_id: str, destination: str):
    """Download the Sentinel Scene from AWS.

    It uses the library `sentinelhub-py <https://sentinelhub-py.readthedocs.io>`_
    to download the Sentinel-2 SAFE folder. Once downloaded, it compressed into a `zip`.

    Notes:
        Make sure to set both `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` in environment variable.

        This method does not raise Exception.

    Args:
        scene_id - Sentinel-2 Product Id (We call as scene_id)
        destination - Path to store data. We recommend to use python `tempfile.TemporaryDirectory` and then move.

    Returns:
        Path to the downloaded file when success or None when an error occurred.
    """
    try:
        config = SHConfig()
        config.aws_access_key_id = Config.AWS_ACCESS_KEY_ID
        config.aws_secret_access_key = Config.AWS_SECRET_ACCESS_KEY

        logging.info(f'Downloading {scene_id} From AWS...')

        request = AwsProductRequest(
            product_id=scene_id,
            data_folder=destination,
            safe_format=True,
            config=config,
        )
        _ = request.get_data(save_data=True)

        file_name = f'{scene_id}.SAFE'

        logging.info(f'Compressing {scene_id}.SAFE...')
        with working_directory(destination):
            shutil.make_archive(base_dir=file_name, format='zip', base_name=scene_id)

        # NOTE(review): this returns the unpacked `.SAFE` folder path, not the
        # `<scene_id>.zip` archive created just above — confirm callers expect
        # the folder rather than the archive.
        return Path(destination) / file_name
    # Was `except BaseException`, which also swallows KeyboardInterrupt and
    # SystemExit; narrow to Exception so process-control signals propagate
    # while keeping the documented "never raises" behavior for real errors.
    except Exception as e:
        logging.error(f'Error downloading from AWS. {scene_id} - {str(e)}')
        return None
def test_s3_filesystem(self):
    """S3 filesystems built from an s3:// URL expose the folder path and
    pick up credentials from a custom SHConfig."""
    folder_name = 'my_folder'
    s3_url = 's3://test-eo-bucket/{}'.format(folder_name)

    filesystem = get_filesystem(s3_url)
    # assertIsInstance / assertEqual give informative failure messages,
    # unlike assertTrue(isinstance(...)).
    self.assertIsInstance(filesystem, S3FS)
    self.assertEqual(filesystem.dir_path, folder_name)

    custom_config = SHConfig()
    custom_config.aws_access_key_id = 'fake-key'
    custom_config.aws_secret_access_key = 'fake-secret'
    filesystem = load_s3_filesystem(s3_url, strict=False, config=custom_config)

    self.assertIsInstance(filesystem, S3FS)
    self.assertEqual(filesystem.aws_access_key_id, custom_config.aws_access_key_id)
    self.assertEqual(filesystem.aws_secret_access_key, custom_config.aws_secret_access_key)
def test_s3_filesystem(aws_session_token):
    """Filesystems built from an s3:// URL expose the folder path and carry
    the credentials (including a session token) from a custom SHConfig."""
    folder_name = "my_folder"
    s3_url = f"s3://test-eo-bucket/{folder_name}"

    default_fs = get_filesystem(s3_url)
    assert isinstance(default_fs, S3FS)
    assert default_fs.dir_path == folder_name

    custom_config = SHConfig()
    custom_config.aws_access_key_id = "fake-key"
    custom_config.aws_secret_access_key = "fake-secret"
    custom_config.aws_session_token = aws_session_token

    # Both construction paths must yield equivalently-configured filesystems.
    for fs in (
        load_s3_filesystem(s3_url, strict=False, config=custom_config),
        get_filesystem(s3_url, config=custom_config),
    ):
        assert isinstance(fs, S3FS)
        assert fs.aws_access_key_id == custom_config.aws_access_key_id
        assert fs.aws_secret_access_key == custom_config.aws_secret_access_key
        assert fs.aws_session_token == aws_session_token
client = BackendApplicationClient(client_id=client_id) oauth = OAuth2Session(client=client) # Get token for the session token = oauth.fetch_token( token_url='https://services.sentinel-hub.com/oauth/token', client_id=client_id, client_secret=client_secret) # All requests using this session will have an access token automatically added resp = oauth.get("https://services.sentinel-hub.com/oauth/tokeninfo") ## This is the code used to get a specified tile and date config = SHConfig( ) # config is a SHconfig object that contains the aws credentials etc: https://sentinelhub-py.readthedocs.io/en/latest/config.html config.aws_access_key_id = '<YOUR_ACCESS_KEY_ID>' config.aws_secret_access_key = '<YOUR_SECRET_ACCESS_KEY>' ## Now load in the lists of tiles sites = [ 'Ardfern1', 'Ardfern2', 'Arisaig', 'Auchteraw', 'GlenLoy', 'Mandally', 'Achdalieu' ] s1list = [] s2list = [] for site in sites: s1scenes = np.load('query_results/%s_query_results_sentinel_1.npz' % site, allow_pickle=True)['arr_0'][()] for scene in list(s1scenes.keys()): s1list.append(s1scenes[scene]['title']) s2scenes = np.load('query_results/%s_query_results_sentinel_2.npz' % site,